[PR ipa/88933] Careful CFG cleanup in IPA-CP function transformation
[gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64
65 /* I'm not real happy about this, but we need to handle gimple and
66 non-gimple trees. */
67
68 /* Inlining, Cloning, Versioning, Parallelization
69
70 Inlining: a function body is duplicated, but the PARM_DECLs are
71 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72 MODIFY_EXPRs that store to a dedicated returned-value variable.
73 The duplicated eh_region info of the copy will later be appended
74 to the info for the caller; the eh_region info in copied throwing
75     statements and RESX statements is adjusted accordingly.
76
77 Cloning: (only in C++) We have one body for a con/de/structor, and
78 multiple function decls, each with a unique parameter list.
79     Duplicate the body, using the given decl map; some parameters
80 will become constants (like 0 or 1).
81
82     Versioning: a function body is duplicated and the result is a new
83     function, rather than being inserted into the blocks of an existing
84     function as with inlining.  Some parameters will become constants.
85
86 Parallelization: a region of a function is duplicated resulting in
87 a new function. Variables may be replaced with complex expressions
88 to enable shared variable semantics.
89
90     All of these will simultaneously look up any callgraph edges.  If
91 we're going to inline the duplicated function body, and the given
92 function has some cloned callgraph nodes (one for each place this
93 function will be inlined) those callgraph edges will be duplicated.
94 If we're cloning the body, those callgraph edges will be
95 updated to point into the new body. (Note that the original
96 callgraph node and edge list will not be altered.)
97
98 See the CALL_EXPR handling case in copy_tree_body_r (). */
99
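/* As a rough illustration of the return-value rewrite described above
   (an informal sketch with made-up names, not taken from any dump), an
   inlined body such as

     return a + b;

   is remapped along the lines of

     retval.1 = a + b;

   where retval.1 stands for the variable created by declare_return_variable,
   and the branch back to the code after the call site is realized as a CFG
   edge rather than an explicit statement.  */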
100 /* To Do:
101
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
108
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
111
112
113 /* Weights that estimate_num_insns uses to estimate the size of the
114 produced code. */
115
116 eni_weights eni_size_weights;
117
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119 to execute the produced code. */
120
121 eni_weights eni_time_weights;
122
123 /* Prototypes. */
124
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_decl_to_var (tree, copy_body_data *);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137
138 /* Insert a tree->tree mapping for ID.  Although the name suggests
139    that the trees should be variables, this is used for more than that.  */
140
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144 id->decl_map->put (key, value);
145
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
148 if (key != value)
149 id->decl_map->put (value, value);
150 }
151
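/* For example (informal sketch), after

     insert_decl_map (id, old_parm, new_var);

   both id->decl_map->get (old_parm) and id->decl_map->get (new_var)
   return new_var, so re-encountering the replacement node later does
   not create yet another copy of it.  */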
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
154
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
160
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
163
164 if (!target_for_debug_bind (key))
165 return;
166
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
169
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
172
173 id->debug_map->put (key, value);
174 }
175
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
181
182 /* Construct new SSA name for old NAME. ID is the inline context. */
183
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187 tree new_tree, var;
188 tree *n;
189
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
191
192 n = id->decl_map->get (name);
193 if (n)
194 return unshare_expr (*n);
195
196 if (processing_debug_stmt)
197 {
198 if (SSA_NAME_IS_DEFAULT_DEF (name)
199 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 && id->entry_bb == NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 {
203 tree vexpr = make_node (DEBUG_EXPR_DECL);
204 gimple *def_temp;
205 gimple_stmt_iterator gsi;
206 tree val = SSA_NAME_VAR (name);
207
208 n = id->decl_map->get (val);
209 if (n != NULL)
210 val = *n;
211 if (TREE_CODE (val) != PARM_DECL
212 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
213 {
214 processing_debug_stmt = -1;
215 return name;
216 }
217 n = id->decl_map->get (val);
218 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
219 return *n;
220 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
221 DECL_ARTIFICIAL (vexpr) = 1;
222 TREE_TYPE (vexpr) = TREE_TYPE (name);
223 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
224 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
225 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
226 insert_decl_map (id, val, vexpr);
227 return vexpr;
228 }
229
230 processing_debug_stmt = -1;
231 return name;
232 }
233
234 /* Remap anonymous SSA names or SSA names of anonymous decls. */
235 var = SSA_NAME_VAR (name);
236 if (!var
237 || (!SSA_NAME_IS_DEFAULT_DEF (name)
238 && VAR_P (var)
239 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
240 && DECL_ARTIFICIAL (var)
241 && DECL_IGNORED_P (var)
242 && !DECL_NAME (var)))
243 {
244 struct ptr_info_def *pi;
245 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
246 if (!var && SSA_NAME_IDENTIFIER (name))
247 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
248 insert_decl_map (id, name, new_tree);
249 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
250 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
251 /* At least IPA points-to info can be directly transferred. */
252 if (id->src_cfun->gimple_df
253 && id->src_cfun->gimple_df->ipa_pta
254 && POINTER_TYPE_P (TREE_TYPE (name))
255 && (pi = SSA_NAME_PTR_INFO (name))
256 && !pi->pt.anything)
257 {
258 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
259 new_pi->pt = pi->pt;
260 }
261 return new_tree;
262 }
263
264   /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
265      in copy_bb.  */
266 new_tree = remap_decl (var, id);
267
268   /* We might've substituted a constant or another SSA_NAME for
269      the variable.
270
271      Replace the SSA name representing the RESULT_DECL by the variable during
272      inlining: this saves us from the need to introduce a PHI node in case
273      the return value is only partly initialized.  */
274 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
275 && (!SSA_NAME_VAR (name)
276 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
277 || !id->transform_return_to_modify))
278 {
279 struct ptr_info_def *pi;
280 new_tree = make_ssa_name (new_tree);
281 insert_decl_map (id, name, new_tree);
282 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
283 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
284 /* At least IPA points-to info can be directly transferred. */
285 if (id->src_cfun->gimple_df
286 && id->src_cfun->gimple_df->ipa_pta
287 && POINTER_TYPE_P (TREE_TYPE (name))
288 && (pi = SSA_NAME_PTR_INFO (name))
289 && !pi->pt.anything)
290 {
291 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
292 new_pi->pt = pi->pt;
293 }
294 if (SSA_NAME_IS_DEFAULT_DEF (name))
295 {
296	  /* By inlining a function with an uninitialized variable, we might
297	     extend its lifetime (the variable might get reused).  This causes an
298	     ICE if we end up extending the lifetime of an SSA name across an
299	     abnormal edge, and it also increases register pressure.
300
301	     We simply initialize all uninitialized vars to 0, except
302	     when we are inlining into the very first BB.  We could avoid
303	     this for all BBs that are not inside strongly connected
304	     regions of the CFG, but that is expensive to test.  */
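/* Informal sketch of the initialization emitted below: for such an
   uninitialized default definition i_2(D) (names illustrative only),
   the copy gets an explicit

     i_3 = 0;

   appended to id->entry_bb instead of remaining a default definition
   in the destination function.  */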
305 if (id->entry_bb
306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
307 && (!SSA_NAME_VAR (name)
308 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
309 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
310 0)->dest
311 || EDGE_COUNT (id->entry_bb->preds) != 1))
312 {
313 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
314 gimple *init_stmt;
315 tree zero = build_zero_cst (TREE_TYPE (new_tree));
316
317 init_stmt = gimple_build_assign (new_tree, zero);
318 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
319 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
320 }
321 else
322 {
323 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
324 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
325 }
326 }
327 }
328 else
329 insert_decl_map (id, name, new_tree);
330 return new_tree;
331 }
332
333 /* Remap DECL during the copying of the BLOCK tree for the function. */
334
335 tree
336 remap_decl (tree decl, copy_body_data *id)
337 {
338 tree *n;
339
340 /* We only remap local variables in the current function. */
341
342 /* See if we have remapped this declaration. */
343
344 n = id->decl_map->get (decl);
345
346 if (!n && processing_debug_stmt)
347 {
348 processing_debug_stmt = -1;
349 return decl;
350 }
351
352 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
353 necessary DECLs have already been remapped and we do not want to duplicate
354 a decl coming from outside of the sequence we are copying. */
355 if (!n
356 && id->prevent_decl_creation_for_types
357 && id->remapping_type_depth > 0
358 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
359 return decl;
360
361 /* If we didn't already have an equivalent for this declaration, create one
362 now. */
363 if (!n)
364 {
365 /* Make a copy of the variable or label. */
366 tree t = id->copy_decl (decl, id);
367
368 /* Remember it, so that if we encounter this local entity again
369 we can reuse this copy. Do this early because remap_type may
370 need this decl for TYPE_STUB_DECL. */
371 insert_decl_map (id, decl, t);
372
373 if (!DECL_P (t))
374 return t;
375
376 /* Remap types, if necessary. */
377 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
378 if (TREE_CODE (t) == TYPE_DECL)
379 {
380 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
381
382 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
383 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
384 is not set on the TYPE_DECL, for example in LTO mode. */
385 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
386 {
387 tree x = build_variant_type_copy (TREE_TYPE (t));
388 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
389 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
390 DECL_ORIGINAL_TYPE (t) = x;
391 }
392 }
393
394 /* Remap sizes as necessary. */
395 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
396 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
397
398 /* If fields, do likewise for offset and qualifier. */
399 if (TREE_CODE (t) == FIELD_DECL)
400 {
401 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
402 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
403 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
404 }
405
406 return t;
407 }
408
409 if (id->do_not_unshare)
410 return *n;
411 else
412 return unshare_expr (*n);
413 }
414
415 static tree
416 remap_type_1 (tree type, copy_body_data *id)
417 {
418 tree new_tree, t;
419
420   /* We do need a copy.  Build and register it now.  If this is a pointer or
421 reference type, remap the designated type and make a new pointer or
422 reference type. */
423 if (TREE_CODE (type) == POINTER_TYPE)
424 {
425 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
426 TYPE_MODE (type),
427 TYPE_REF_CAN_ALIAS_ALL (type));
428 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
429 new_tree = build_type_attribute_qual_variant (new_tree,
430 TYPE_ATTRIBUTES (type),
431 TYPE_QUALS (type));
432 insert_decl_map (id, type, new_tree);
433 return new_tree;
434 }
435 else if (TREE_CODE (type) == REFERENCE_TYPE)
436 {
437 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
438 TYPE_MODE (type),
439 TYPE_REF_CAN_ALIAS_ALL (type));
440 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
441 new_tree = build_type_attribute_qual_variant (new_tree,
442 TYPE_ATTRIBUTES (type),
443 TYPE_QUALS (type));
444 insert_decl_map (id, type, new_tree);
445 return new_tree;
446 }
447 else
448 new_tree = copy_node (type);
449
450 insert_decl_map (id, type, new_tree);
451
452 /* This is a new type, not a copy of an old type. Need to reassociate
453 variants. We can handle everything except the main variant lazily. */
454 t = TYPE_MAIN_VARIANT (type);
455 if (type != t)
456 {
457 t = remap_type (t, id);
458 TYPE_MAIN_VARIANT (new_tree) = t;
459 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
460 TYPE_NEXT_VARIANT (t) = new_tree;
461 }
462 else
463 {
464 TYPE_MAIN_VARIANT (new_tree) = new_tree;
465 TYPE_NEXT_VARIANT (new_tree) = NULL;
466 }
467
468 if (TYPE_STUB_DECL (type))
469 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
470
471 /* Lazily create pointer and reference types. */
472 TYPE_POINTER_TO (new_tree) = NULL;
473 TYPE_REFERENCE_TO (new_tree) = NULL;
474
475 /* Copy all types that may contain references to local variables; be sure to
476      preserve sharing between the type and its main variant when possible.  */
477 switch (TREE_CODE (new_tree))
478 {
479 case INTEGER_TYPE:
480 case REAL_TYPE:
481 case FIXED_POINT_TYPE:
482 case ENUMERAL_TYPE:
483 case BOOLEAN_TYPE:
484 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
485 {
486 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
487 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
488
489 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
490 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 }
492 else
493 {
494 t = TYPE_MIN_VALUE (new_tree);
495 if (t && TREE_CODE (t) != INTEGER_CST)
496 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
497
498 t = TYPE_MAX_VALUE (new_tree);
499 if (t && TREE_CODE (t) != INTEGER_CST)
500 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
501 }
502 return new_tree;
503
504 case FUNCTION_TYPE:
505 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
506 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
507 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
508 else
509 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
510 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
511 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
512 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
513 else
514 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
515 return new_tree;
516
517 case ARRAY_TYPE:
518 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
519 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
520 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
521 else
522 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
523
524 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
525 {
526 gcc_checking_assert (TYPE_DOMAIN (type)
527 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
528 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
529 }
530 else
531 {
532 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
533 /* For array bounds where we have decided not to copy over the bounds
534	     variable that isn't used in the OpenMP/OpenACC region, change them to
535 an uninitialized VAR_DECL temporary. */
536 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
537 && id->adjust_array_error_bounds
538 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
539 {
540 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
541 DECL_ATTRIBUTES (v)
542 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
543 DECL_ATTRIBUTES (v));
544 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
545 }
546 }
547 break;
548
549 case RECORD_TYPE:
550 case UNION_TYPE:
551 case QUAL_UNION_TYPE:
552 if (TYPE_MAIN_VARIANT (type) != type
553 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
554 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
555 else
556 {
557 tree f, nf = NULL;
558
559 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
560 {
561 t = remap_decl (f, id);
562 DECL_CONTEXT (t) = new_tree;
563 DECL_CHAIN (t) = nf;
564 nf = t;
565 }
566 TYPE_FIELDS (new_tree) = nreverse (nf);
567 }
568 break;
569
570 case OFFSET_TYPE:
571 default:
572 /* Shouldn't have been thought variable sized. */
573 gcc_unreachable ();
574 }
575
576   /* All variants of the type share the same size, so use the already remapped data.  */
577 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
578 {
579 tree s = TYPE_SIZE (type);
580 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
581 tree su = TYPE_SIZE_UNIT (type);
582 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
583 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
584 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
585 || s == mvs);
586 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
587 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
588 || su == mvsu);
589 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
590 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
591 }
592 else
593 {
594 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
595 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
596 }
597
598 return new_tree;
599 }
600
601 tree
602 remap_type (tree type, copy_body_data *id)
603 {
604 tree *node;
605 tree tmp;
606
607 if (type == NULL)
608 return type;
609
610 /* See if we have remapped this type. */
611 node = id->decl_map->get (type);
612 if (node)
613 return *node;
614
615 /* The type only needs remapping if it's variably modified. */
616 if (! variably_modified_type_p (type, id->src_fn))
617 {
618 insert_decl_map (id, type, type);
619 return type;
620 }
621
622 id->remapping_type_depth++;
623 tmp = remap_type_1 (type, id);
624 id->remapping_type_depth--;
625
626 return tmp;
627 }
628
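/* Informal example of when remap_type really makes a copy: for a local C99 VLA

     int buf[n];

   the type 'int[n]' refers to a size expression involving the local 'n',
   so variably_modified_type_p is true and remap_type_1 rebuilds the type
   with remapped sizes, whereas a fixed 'int[10]' is simply entered into
   the map as itself and returned unchanged.  */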
629 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
630
631 static bool
632 can_be_nonlocal (tree decl, copy_body_data *id)
633 {
634 /* We cannot duplicate function decls. */
635 if (TREE_CODE (decl) == FUNCTION_DECL)
636 return true;
637
638 /* Local static vars must be non-local or we get multiple declaration
639 problems. */
640 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
641 return true;
642
643 return false;
644 }
645
646 static tree
647 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
648 copy_body_data *id)
649 {
650 tree old_var;
651 tree new_decls = NULL_TREE;
652
653 /* Remap its variables. */
654 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
655 {
656 tree new_var;
657
658 if (can_be_nonlocal (old_var, id))
659 {
660 /* We need to add this variable to the local decls as otherwise
661 nothing else will do so. */
662 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
663 add_local_decl (cfun, old_var);
664 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
665 && !DECL_IGNORED_P (old_var)
666 && nonlocalized_list)
667 vec_safe_push (*nonlocalized_list, old_var);
668 continue;
669 }
670
671 /* Remap the variable. */
672 new_var = remap_decl (old_var, id);
673
674 /* If we didn't remap this variable, we can't mess with its
675 TREE_CHAIN. If we remapped this variable to the return slot, it's
676 already declared somewhere else, so don't declare it here. */
677
678 if (new_var == id->retvar)
679 ;
680 else if (!new_var)
681 {
682 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
683 && !DECL_IGNORED_P (old_var)
684 && nonlocalized_list)
685 vec_safe_push (*nonlocalized_list, old_var);
686 }
687 else
688 {
689 gcc_assert (DECL_P (new_var));
690 DECL_CHAIN (new_var) = new_decls;
691 new_decls = new_var;
692
693 /* Also copy value-expressions. */
694 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
695 {
696 tree tem = DECL_VALUE_EXPR (new_var);
697 bool old_regimplify = id->regimplify;
698 id->remapping_type_depth++;
699 walk_tree (&tem, copy_tree_body_r, id, NULL);
700 id->remapping_type_depth--;
701 id->regimplify = old_regimplify;
702 SET_DECL_VALUE_EXPR (new_var, tem);
703 }
704 }
705 }
706
707 return nreverse (new_decls);
708 }
709
710 /* Copy the BLOCK to contain remapped versions of the variables
711 therein. And hook the new block into the block-tree. */
712
713 static void
714 remap_block (tree *block, copy_body_data *id)
715 {
716 tree old_block;
717 tree new_block;
718
719 /* Make the new block. */
720 old_block = *block;
721 new_block = make_node (BLOCK);
722 TREE_USED (new_block) = TREE_USED (old_block);
723 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
724 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
725 BLOCK_NONLOCALIZED_VARS (new_block)
726 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
727 *block = new_block;
728
729 /* Remap its variables. */
730 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
731 &BLOCK_NONLOCALIZED_VARS (new_block),
732 id);
733
734 if (id->transform_lang_insert_block)
735 id->transform_lang_insert_block (new_block);
736
737 /* Remember the remapped block. */
738 insert_decl_map (id, old_block, new_block);
739 }
740
741 /* Copy the whole block tree and root it in id->block. */
742
743 static tree
744 remap_blocks (tree block, copy_body_data *id)
745 {
746 tree t;
747 tree new_tree = block;
748
749 if (!block)
750 return NULL;
751
752 remap_block (&new_tree, id);
753 gcc_assert (new_tree != block);
754 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
755 prepend_lexical_block (new_tree, remap_blocks (t, id));
756 /* Blocks are in arbitrary order, but make things slightly prettier and do
757 not swap order when producing a copy. */
758 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
759 return new_tree;
760 }
761
762 /* Remap the block tree rooted at BLOCK to nothing. */
763
764 static void
765 remap_blocks_to_null (tree block, copy_body_data *id)
766 {
767 tree t;
768 insert_decl_map (id, block, NULL_TREE);
769 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
770 remap_blocks_to_null (t, id);
771 }
772
773 /* Remap the location info pointed to by LOCUS. */
774
775 static location_t
776 remap_location (location_t locus, copy_body_data *id)
777 {
778 if (LOCATION_BLOCK (locus))
779 {
780 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
781 gcc_assert (n);
782 if (*n)
783 return set_block (locus, *n);
784 }
785
786 locus = LOCATION_LOCUS (locus);
787
788 if (locus != UNKNOWN_LOCATION && id->block)
789 return set_block (locus, id->block);
790
791 return locus;
792 }
793
794 static void
795 copy_statement_list (tree *tp)
796 {
797 tree_stmt_iterator oi, ni;
798 tree new_tree;
799
800 new_tree = alloc_stmt_list ();
801 ni = tsi_start (new_tree);
802 oi = tsi_start (*tp);
803 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
804 *tp = new_tree;
805
806 for (; !tsi_end_p (oi); tsi_next (&oi))
807 {
808 tree stmt = tsi_stmt (oi);
809 if (TREE_CODE (stmt) == STATEMENT_LIST)
810 /* This copy is not redundant; tsi_link_after will smash this
811 STATEMENT_LIST into the end of the one we're building, and we
812 don't want to do that with the original. */
813 copy_statement_list (&stmt);
814 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
815 }
816 }
817
818 static void
819 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
820 {
821 tree block = BIND_EXPR_BLOCK (*tp);
822 /* Copy (and replace) the statement. */
823 copy_tree_r (tp, walk_subtrees, NULL);
824 if (block)
825 {
826 remap_block (&block, id);
827 BIND_EXPR_BLOCK (*tp) = block;
828 }
829
830 if (BIND_EXPR_VARS (*tp))
831 /* This will remap a lot of the same decls again, but this should be
832 harmless. */
833 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
834 }
835
836
837 /* Create a new gimple_seq by remapping all the statements in BODY
838 using the inlining information in ID. */
839
840 static gimple_seq
841 remap_gimple_seq (gimple_seq body, copy_body_data *id)
842 {
843 gimple_stmt_iterator si;
844 gimple_seq new_body = NULL;
845
846 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
847 {
848 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
849 gimple_seq_add_seq (&new_body, new_stmts);
850 }
851
852 return new_body;
853 }
854
855
856 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
857 block using the mapping information in ID. */
858
859 static gimple *
860 copy_gimple_bind (gbind *stmt, copy_body_data *id)
861 {
862 gimple *new_bind;
863 tree new_block, new_vars;
864 gimple_seq body, new_body;
865
866 /* Copy the statement. Note that we purposely don't use copy_stmt
867 here because we need to remap statements as we copy. */
868 body = gimple_bind_body (stmt);
869 new_body = remap_gimple_seq (body, id);
870
871 new_block = gimple_bind_block (stmt);
872 if (new_block)
873 remap_block (&new_block, id);
874
875 /* This will remap a lot of the same decls again, but this should be
876 harmless. */
877 new_vars = gimple_bind_vars (stmt);
878 if (new_vars)
879 new_vars = remap_decls (new_vars, NULL, id);
880
881 new_bind = gimple_build_bind (new_vars, new_body, new_block);
882
883 return new_bind;
884 }
885
886 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
887
888 static bool
889 is_parm (tree decl)
890 {
891 if (TREE_CODE (decl) == SSA_NAME)
892 {
893 decl = SSA_NAME_VAR (decl);
894 if (!decl)
895 return false;
896 }
897
898 return (TREE_CODE (decl) == PARM_DECL);
899 }
900
901 /* Remap the dependence CLIQUE from the source to the destination function
902 as specified in ID. */
903
904 static unsigned short
905 remap_dependence_clique (copy_body_data *id, unsigned short clique)
906 {
907 if (clique == 0 || processing_debug_stmt)
908 return 0;
909 if (!id->dependence_map)
910 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
911 bool existed;
912 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
913 if (!existed)
914 newc = ++cfun->last_clique;
915 return newc;
916 }
917
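/* Informal example: when the copied body first meets a MEM_REF carrying
   source clique 1, a fresh destination clique is allocated via
   ++cfun->last_clique and recorded in id->dependence_map; every later
   occurrence of source clique 1 reuses that same number, so the original
   may-alias partitioning is preserved in the copy.  */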
918 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
919 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
920    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
921    recursing into the child nodes of *TP.  */
922
923 static tree
924 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
925 {
926 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
927 copy_body_data *id = (copy_body_data *) wi_p->info;
928 tree fn = id->src_fn;
929
930 /* For recursive invocations this is no longer the LHS itself. */
931 bool is_lhs = wi_p->is_lhs;
932 wi_p->is_lhs = false;
933
934 if (TREE_CODE (*tp) == SSA_NAME)
935 {
936 *tp = remap_ssa_name (*tp, id);
937 *walk_subtrees = 0;
938 if (is_lhs)
939 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
940 return NULL;
941 }
942 else if (auto_var_in_fn_p (*tp, fn))
943 {
944 /* Local variables and labels need to be replaced by equivalent
945 variables. We don't want to copy static variables; there's
946 only one of those, no matter how many times we inline the
947 containing function. Similarly for globals from an outer
948 function. */
949 tree new_decl;
950
951 /* Remap the declaration. */
952 new_decl = remap_decl (*tp, id);
953 gcc_assert (new_decl);
954 /* Replace this variable with the copy. */
955 STRIP_TYPE_NOPS (new_decl);
956       /* ??? The C++ frontend uses a void * null pointer constant to initialize
957          objects of any other type.  This confuses the middle-end type
958          verification.  As cloned bodies do not go through gimplification
959          again, the fixup there doesn't trigger.  */
960 if (TREE_CODE (new_decl) == INTEGER_CST
961 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
962 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
963 *tp = new_decl;
964 *walk_subtrees = 0;
965 }
966 else if (TREE_CODE (*tp) == STATEMENT_LIST)
967 gcc_unreachable ();
968 else if (TREE_CODE (*tp) == SAVE_EXPR)
969 gcc_unreachable ();
970 else if (TREE_CODE (*tp) == LABEL_DECL
971 && (!DECL_CONTEXT (*tp)
972 || decl_function_context (*tp) == id->src_fn))
973 /* These may need to be remapped for EH handling. */
974 *tp = remap_decl (*tp, id);
975 else if (TREE_CODE (*tp) == FIELD_DECL)
976 {
977 /* If the enclosing record type is variably_modified_type_p, the field
978 has already been remapped. Otherwise, it need not be. */
979 tree *n = id->decl_map->get (*tp);
980 if (n)
981 *tp = *n;
982 *walk_subtrees = 0;
983 }
984 else if (TYPE_P (*tp))
985 /* Types may need remapping as well. */
986 *tp = remap_type (*tp, id);
987 else if (CONSTANT_CLASS_P (*tp))
988 {
989 /* If this is a constant, we have to copy the node iff the type
990 will be remapped. copy_tree_r will not copy a constant. */
991 tree new_type = remap_type (TREE_TYPE (*tp), id);
992
993 if (new_type == TREE_TYPE (*tp))
994 *walk_subtrees = 0;
995
996 else if (TREE_CODE (*tp) == INTEGER_CST)
997 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
998 else
999 {
1000 *tp = copy_node (*tp);
1001 TREE_TYPE (*tp) = new_type;
1002 }
1003 }
1004 else
1005 {
1006 /* Otherwise, just copy the node. Note that copy_tree_r already
1007 knows not to copy VAR_DECLs, etc., so this is safe. */
1008
1009 if (TREE_CODE (*tp) == MEM_REF)
1010 {
1011 /* We need to re-canonicalize MEM_REFs from inline substitutions
1012 that can happen when a pointer argument is an ADDR_EXPR.
1013 Recurse here manually to allow that. */
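/* Informal example: if the pointer parameter p was substituted with &a,
   the copied MEM[(int *) p] would otherwise read MEM[(int *) &a]; the
   fold_build2 call below re-folds that into a direct reference to 'a'
   so later passes see the canonical form.  */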
1014 tree ptr = TREE_OPERAND (*tp, 0);
1015 tree type = remap_type (TREE_TYPE (*tp), id);
1016 tree old = *tp;
1017 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1018 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1019 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1020 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1021 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1022 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1023 {
1024 MR_DEPENDENCE_CLIQUE (*tp)
1025 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1026 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1027 }
1028 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1029 remapped a parameter as the property might be valid only
1030 for the parameter itself. */
1031 if (TREE_THIS_NOTRAP (old)
1032 && (!is_parm (TREE_OPERAND (old, 0))
1033 || (!id->transform_parameter && is_parm (ptr))))
1034 TREE_THIS_NOTRAP (*tp) = 1;
1035 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1036 *walk_subtrees = 0;
1037 return NULL;
1038 }
1039
1040 /* Here is the "usual case". Copy this tree node, and then
1041 tweak some special cases. */
1042 copy_tree_r (tp, walk_subtrees, NULL);
1043
1044 if (TREE_CODE (*tp) != OMP_CLAUSE)
1045 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1046
1047 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1048 {
1049 /* The copied TARGET_EXPR has never been expanded, even if the
1050 original node was expanded already. */
1051 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1052 TREE_OPERAND (*tp, 3) = NULL_TREE;
1053 }
1054 else if (TREE_CODE (*tp) == ADDR_EXPR)
1055 {
1056	  /* Variable substitution need not be simple; consider, in particular,
1057	     the MEM_REF substitution above.  Make sure that
1058	     TREE_CONSTANT and friends are up to date.  */
1059 int invariant = is_gimple_min_invariant (*tp);
1060 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1061 recompute_tree_invariant_for_addr_expr (*tp);
1062
1063 /* If this used to be invariant, but is not any longer,
1064 then regimplification is probably needed. */
1065 if (invariant && !is_gimple_min_invariant (*tp))
1066 id->regimplify = true;
1067
1068 *walk_subtrees = 0;
1069 }
1070 }
1071
1072 /* Update the TREE_BLOCK for the cloned expr. */
1073 if (EXPR_P (*tp))
1074 {
1075 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1076 tree old_block = TREE_BLOCK (*tp);
1077 if (old_block)
1078 {
1079 tree *n;
1080 n = id->decl_map->get (TREE_BLOCK (*tp));
1081 if (n)
1082 new_block = *n;
1083 }
1084 TREE_SET_BLOCK (*tp, new_block);
1085 }
1086
1087 /* Keep iterating. */
1088 return NULL_TREE;
1089 }
1090
1091
1092 /* Called from copy_tree_body and friends via walk_tree.  DATA is really a
1093    `copy_body_data *'.  */
1094
1095 tree
1096 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1097 {
1098 copy_body_data *id = (copy_body_data *) data;
1099 tree fn = id->src_fn;
1100 tree new_block;
1101
1102 /* Begin by recognizing trees that we'll completely rewrite for the
1103 inlining context. Our output for these trees is completely
1104      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1105 into an edge). Further down, we'll handle trees that get
1106 duplicated and/or tweaked. */
1107
1108 /* When requested, RETURN_EXPRs should be transformed to just the
1109 contained MODIFY_EXPR. The branch semantics of the return will
1110 be handled elsewhere by manipulating the CFG rather than a statement. */
1111 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1112 {
1113 tree assignment = TREE_OPERAND (*tp, 0);
1114
1115 /* If we're returning something, just turn that into an
1116	 assignment to the equivalent of the original RESULT_DECL.
1117 If the "assignment" is just the result decl, the result
1118 decl has already been set (e.g. a recent "foo (&result_decl,
1119 ...)"); just toss the entire RETURN_EXPR. */
1120 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1121 {
1122 /* Replace the RETURN_EXPR with (a copy of) the
1123 MODIFY_EXPR hanging underneath. */
1124 *tp = copy_node (assignment);
1125 }
1126 else /* Else the RETURN_EXPR returns no value. */
1127 {
1128 *tp = NULL;
1129 return (tree) (void *)1;
1130 }
1131 }
1132 else if (TREE_CODE (*tp) == SSA_NAME)
1133 {
1134 *tp = remap_ssa_name (*tp, id);
1135 *walk_subtrees = 0;
1136 return NULL;
1137 }
1138
1139 /* Local variables and labels need to be replaced by equivalent
1140 variables. We don't want to copy static variables; there's only
1141 one of those, no matter how many times we inline the containing
1142 function. Similarly for globals from an outer function. */
1143 else if (auto_var_in_fn_p (*tp, fn))
1144 {
1145 tree new_decl;
1146
1147 /* Remap the declaration. */
1148 new_decl = remap_decl (*tp, id);
1149 gcc_assert (new_decl);
1150 /* Replace this variable with the copy. */
1151 STRIP_TYPE_NOPS (new_decl);
1152 *tp = new_decl;
1153 *walk_subtrees = 0;
1154 }
1155 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1156 copy_statement_list (tp);
1157 else if (TREE_CODE (*tp) == SAVE_EXPR
1158 || TREE_CODE (*tp) == TARGET_EXPR)
1159 remap_save_expr (tp, id->decl_map, walk_subtrees);
1160 else if (TREE_CODE (*tp) == LABEL_DECL
1161 && (! DECL_CONTEXT (*tp)
1162 || decl_function_context (*tp) == id->src_fn))
1163 /* These may need to be remapped for EH handling. */
1164 *tp = remap_decl (*tp, id);
1165 else if (TREE_CODE (*tp) == BIND_EXPR)
1166 copy_bind_expr (tp, walk_subtrees, id);
1167 /* Types may need remapping as well. */
1168 else if (TYPE_P (*tp))
1169 *tp = remap_type (*tp, id);
1170
1171 /* If this is a constant, we have to copy the node iff the type will be
1172 remapped. copy_tree_r will not copy a constant. */
1173 else if (CONSTANT_CLASS_P (*tp))
1174 {
1175 tree new_type = remap_type (TREE_TYPE (*tp), id);
1176
1177 if (new_type == TREE_TYPE (*tp))
1178 *walk_subtrees = 0;
1179
1180 else if (TREE_CODE (*tp) == INTEGER_CST)
1181 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1182 else
1183 {
1184 *tp = copy_node (*tp);
1185 TREE_TYPE (*tp) = new_type;
1186 }
1187 }
1188
1189 /* Otherwise, just copy the node. Note that copy_tree_r already
1190 knows not to copy VAR_DECLs, etc., so this is safe. */
1191 else
1192 {
1193 /* Here we handle trees that are not completely rewritten.
1194 First we detect some inlining-induced bogosities for
1195 discarding. */
1196 if (TREE_CODE (*tp) == MODIFY_EXPR
1197 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1198 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1199 {
1200 /* Some assignments VAR = VAR; don't generate any rtl code
1201 and thus don't count as variable modification. Avoid
1202 keeping bogosities like 0 = 0. */
1203 tree decl = TREE_OPERAND (*tp, 0), value;
1204 tree *n;
1205
1206 n = id->decl_map->get (decl);
1207 if (n)
1208 {
1209 value = *n;
1210 STRIP_TYPE_NOPS (value);
1211 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1212 {
1213 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1214 return copy_tree_body_r (tp, walk_subtrees, data);
1215 }
1216 }
1217 }
1218 else if (TREE_CODE (*tp) == INDIRECT_REF)
1219 {
1220 /* Get rid of *& from inline substitutions that can happen when a
1221 pointer argument is an ADDR_EXPR. */
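/* Informal example: inlining a call f (&x) maps the parameter to &x, so
   a copied *p would become *&x; the folding below reduces that back to
   plain x.  */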
1222 tree decl = TREE_OPERAND (*tp, 0);
1223 tree *n = id->decl_map->get (decl);
1224 if (n)
1225 {
1226 /* If we happen to get an ADDR_EXPR in n->value, strip
1227 it manually here as we'll eventually get ADDR_EXPRs
1228	     that lie about the type they point to.  In this case
1229 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1230 but we absolutely rely on that. As fold_indirect_ref
1231 does other useful transformations, try that first, though. */
1232 tree type = TREE_TYPE (*tp);
1233 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1234 tree old = *tp;
1235 *tp = gimple_fold_indirect_ref (ptr);
1236 if (! *tp)
1237 {
1238 type = remap_type (type, id);
1239 if (TREE_CODE (ptr) == ADDR_EXPR)
1240 {
1241 *tp
1242 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1243 /* ??? We should either assert here or build
1244 a VIEW_CONVERT_EXPR instead of blindly leaking
1245 incompatible types to our IL. */
1246 if (! *tp)
1247 *tp = TREE_OPERAND (ptr, 0);
1248 }
1249 else
1250 {
1251 *tp = build1 (INDIRECT_REF, type, ptr);
1252 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1253 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1254 TREE_READONLY (*tp) = TREE_READONLY (old);
1255 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1256 have remapped a parameter as the property might be
1257 valid only for the parameter itself. */
1258 if (TREE_THIS_NOTRAP (old)
1259 && (!is_parm (TREE_OPERAND (old, 0))
1260 || (!id->transform_parameter && is_parm (ptr))))
1261 TREE_THIS_NOTRAP (*tp) = 1;
1262 }
1263 }
1264 *walk_subtrees = 0;
1265 return NULL;
1266 }
1267 }
1268 else if (TREE_CODE (*tp) == MEM_REF)
1269 {
1270 /* We need to re-canonicalize MEM_REFs from inline substitutions
1271 that can happen when a pointer argument is an ADDR_EXPR.
1272 Recurse here manually to allow that. */
1273 tree ptr = TREE_OPERAND (*tp, 0);
1274 tree type = remap_type (TREE_TYPE (*tp), id);
1275 tree old = *tp;
1276 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1277 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1278 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1279 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1280 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1281 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1282 {
1283 MR_DEPENDENCE_CLIQUE (*tp)
1284 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1285 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1286 }
1287 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1288 remapped a parameter as the property might be valid only
1289 for the parameter itself. */
1290 if (TREE_THIS_NOTRAP (old)
1291 && (!is_parm (TREE_OPERAND (old, 0))
1292 || (!id->transform_parameter && is_parm (ptr))))
1293 TREE_THIS_NOTRAP (*tp) = 1;
1294 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1295 *walk_subtrees = 0;
1296 return NULL;
1297 }
1298
1299 /* Here is the "usual case". Copy this tree node, and then
1300 tweak some special cases. */
1301 copy_tree_r (tp, walk_subtrees, NULL);
1302
1303	 /* If EXPR has a block defined, map it to the newly constructed block.
1304	    When inlining, we want EXPRs without a block to appear in the block
1305	    of the function call if we are not remapping a type.  */
1306 if (EXPR_P (*tp))
1307 {
1308 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1309 if (TREE_BLOCK (*tp))
1310 {
1311 tree *n;
1312 n = id->decl_map->get (TREE_BLOCK (*tp));
1313 if (n)
1314 new_block = *n;
1315 }
1316 TREE_SET_BLOCK (*tp, new_block);
1317 }
1318
1319 if (TREE_CODE (*tp) != OMP_CLAUSE)
1320 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1321
1322 /* The copied TARGET_EXPR has never been expanded, even if the
1323 original node was expanded already. */
1324 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1325 {
1326 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1327 TREE_OPERAND (*tp, 3) = NULL_TREE;
1328 }
1329
1330      /* Variable substitution need not be simple; consider, in particular,
1331	 the INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1332	 and friends are up to date.  */
1333 else if (TREE_CODE (*tp) == ADDR_EXPR)
1334 {
1335 int invariant = is_gimple_min_invariant (*tp);
1336 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1337
1338 /* Handle the case where we substituted an INDIRECT_REF
1339 into the operand of the ADDR_EXPR. */
1340 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1341 {
1342 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1343 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1344 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1345 *tp = t;
1346 }
1347 else
1348 recompute_tree_invariant_for_addr_expr (*tp);
1349
1350 /* If this used to be invariant, but is not any longer,
1351 then regimplification is probably needed. */
1352 if (invariant && !is_gimple_min_invariant (*tp))
1353 id->regimplify = true;
1354
1355 *walk_subtrees = 0;
1356 }
1357 }
1358
1359 /* Keep iterating. */
1360 return NULL_TREE;
1361 }
1362
1363 /* Helper for remap_gimple_stmt. Given an EH region number for the
1364 source function, map that to the duplicate EH region number in
1365 the destination function. */
1366
1367 static int
1368 remap_eh_region_nr (int old_nr, copy_body_data *id)
1369 {
1370 eh_region old_r, new_r;
1371
1372 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1373 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1374
1375 return new_r->index;
1376 }
1377
1378 /* Similar, but operate on INTEGER_CSTs. */
1379
1380 static tree
1381 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1382 {
1383 int old_nr, new_nr;
1384
1385 old_nr = tree_to_shwi (old_t_nr);
1386 new_nr = remap_eh_region_nr (old_nr, id);
1387
1388 return build_int_cst (integer_type_node, new_nr);
1389 }
1390
1391 /* Helper for copy_bb. Remap statement STMT using the inlining
1392 information in ID. Return the new statement copy. */
1393
1394 static gimple_seq
1395 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1396 {
1397 gimple *copy = NULL;
1398 struct walk_stmt_info wi;
1399 bool skip_first = false;
1400 gimple_seq stmts = NULL;
1401
1402 if (is_gimple_debug (stmt)
1403 && (gimple_debug_nonbind_marker_p (stmt)
1404 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1405 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1406 return NULL;
1407
1408 /* Begin by recognizing trees that we'll completely rewrite for the
1409 inlining context. Our output for these trees is completely
1410 different from our input (e.g. RETURN_EXPR is deleted and morphs
1411 into an edge). Further down, we'll handle trees that get
1412 duplicated and/or tweaked. */
1413
1414 /* When requested, GIMPLE_RETURN should be transformed to just the
1415 contained GIMPLE_ASSIGN. The branch semantics of the return will
1416 be handled elsewhere by manipulating the CFG rather than the
1417 statement. */
1418 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1419 {
1420 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1421
1422 /* If we're returning something, just turn that into an
1423 assignment to the equivalent of the original RESULT_DECL.
1424 If RETVAL is just the result decl, the result decl has
1425 already been set (e.g. a recent "foo (&result_decl, ...)");
1426 just toss the entire GIMPLE_RETURN. */
1427 if (retval
1428 && (TREE_CODE (retval) != RESULT_DECL
1429 && (TREE_CODE (retval) != SSA_NAME
1430 || ! SSA_NAME_VAR (retval)
1431 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1432 {
1433 copy = gimple_build_assign (id->do_not_unshare
1434 ? id->retvar : unshare_expr (id->retvar),
1435 retval);
1436 /* id->retvar is already substituted. Skip it on later remapping. */
1437 skip_first = true;
1438 }
1439 else
1440 return NULL;
1441 }
1442 else if (gimple_has_substatements (stmt))
1443 {
1444 gimple_seq s1, s2;
1445
1446 /* When cloning bodies from the C++ front end, we will be handed bodies
1447 in High GIMPLE form. Handle here all the High GIMPLE statements that
1448 have embedded statements. */
1449 switch (gimple_code (stmt))
1450 {
1451 case GIMPLE_BIND:
1452 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1453 break;
1454
1455 case GIMPLE_CATCH:
1456 {
1457 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1458 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1459 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1460 }
1461 break;
1462
1463 case GIMPLE_EH_FILTER:
1464 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1465 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1466 break;
1467
1468 case GIMPLE_TRY:
1469 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1470 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1471 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1472 break;
1473
1474 case GIMPLE_WITH_CLEANUP_EXPR:
1475 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1476 copy = gimple_build_wce (s1);
1477 break;
1478
1479 case GIMPLE_OMP_PARALLEL:
1480 {
1481 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1482 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1483 copy = gimple_build_omp_parallel
1484 (s1,
1485 gimple_omp_parallel_clauses (omp_par_stmt),
1486 gimple_omp_parallel_child_fn (omp_par_stmt),
1487 gimple_omp_parallel_data_arg (omp_par_stmt));
1488 }
1489 break;
1490
1491 case GIMPLE_OMP_TASK:
1492 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1493 copy = gimple_build_omp_task
1494 (s1,
1495 gimple_omp_task_clauses (stmt),
1496 gimple_omp_task_child_fn (stmt),
1497 gimple_omp_task_data_arg (stmt),
1498 gimple_omp_task_copy_fn (stmt),
1499 gimple_omp_task_arg_size (stmt),
1500 gimple_omp_task_arg_align (stmt));
1501 break;
1502
1503 case GIMPLE_OMP_FOR:
1504 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1505 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1506 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1507 gimple_omp_for_clauses (stmt),
1508 gimple_omp_for_collapse (stmt), s2);
1509 {
1510 size_t i;
1511 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1512 {
1513 gimple_omp_for_set_index (copy, i,
1514 gimple_omp_for_index (stmt, i));
1515 gimple_omp_for_set_initial (copy, i,
1516 gimple_omp_for_initial (stmt, i));
1517 gimple_omp_for_set_final (copy, i,
1518 gimple_omp_for_final (stmt, i));
1519 gimple_omp_for_set_incr (copy, i,
1520 gimple_omp_for_incr (stmt, i));
1521 gimple_omp_for_set_cond (copy, i,
1522 gimple_omp_for_cond (stmt, i));
1523 }
1524 }
1525 break;
1526
1527 case GIMPLE_OMP_MASTER:
1528 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1529 copy = gimple_build_omp_master (s1);
1530 break;
1531
1532 case GIMPLE_OMP_TASKGROUP:
1533 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1534 copy = gimple_build_omp_taskgroup
1535 (s1, gimple_omp_taskgroup_clauses (stmt));
1536 break;
1537
1538 case GIMPLE_OMP_ORDERED:
1539 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1540 copy = gimple_build_omp_ordered
1541 (s1,
1542 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1543 break;
1544
1545 case GIMPLE_OMP_SECTION:
1546 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1547 copy = gimple_build_omp_section (s1);
1548 break;
1549
1550 case GIMPLE_OMP_SECTIONS:
1551 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1552 copy = gimple_build_omp_sections
1553 (s1, gimple_omp_sections_clauses (stmt));
1554 break;
1555
1556 case GIMPLE_OMP_SINGLE:
1557 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1558 copy = gimple_build_omp_single
1559 (s1, gimple_omp_single_clauses (stmt));
1560 break;
1561
1562 case GIMPLE_OMP_TARGET:
1563 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1564 copy = gimple_build_omp_target
1565 (s1, gimple_omp_target_kind (stmt),
1566 gimple_omp_target_clauses (stmt));
1567 break;
1568
1569 case GIMPLE_OMP_TEAMS:
1570 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1571 copy = gimple_build_omp_teams
1572 (s1, gimple_omp_teams_clauses (stmt));
1573 break;
1574
1575 case GIMPLE_OMP_CRITICAL:
1576 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1577 copy = gimple_build_omp_critical (s1,
1578 gimple_omp_critical_name
1579 (as_a <gomp_critical *> (stmt)),
1580 gimple_omp_critical_clauses
1581 (as_a <gomp_critical *> (stmt)));
1582 break;
1583
1584 case GIMPLE_TRANSACTION:
1585 {
1586 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1587 gtransaction *new_trans_stmt;
1588 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1589 id);
1590 copy = new_trans_stmt = gimple_build_transaction (s1);
1591 gimple_transaction_set_subcode (new_trans_stmt,
1592 gimple_transaction_subcode (old_trans_stmt));
1593 gimple_transaction_set_label_norm (new_trans_stmt,
1594 gimple_transaction_label_norm (old_trans_stmt));
1595 gimple_transaction_set_label_uninst (new_trans_stmt,
1596 gimple_transaction_label_uninst (old_trans_stmt));
1597 gimple_transaction_set_label_over (new_trans_stmt,
1598 gimple_transaction_label_over (old_trans_stmt));
1599 }
1600 break;
1601
1602 default:
1603 gcc_unreachable ();
1604 }
1605 }
1606 else
1607 {
1608 if (gimple_assign_copy_p (stmt)
1609 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1610 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1611 {
1612 /* Here we handle statements that are not completely rewritten.
1613 First we detect some inlining-induced bogosities for
1614 discarding. */
1615
1616 /* Some assignments VAR = VAR; don't generate any rtl code
1617 and thus don't count as variable modification. Avoid
1618 keeping bogosities like 0 = 0. */
1619 tree decl = gimple_assign_lhs (stmt), value;
1620 tree *n;
1621
1622 n = id->decl_map->get (decl);
1623 if (n)
1624 {
1625 value = *n;
1626 STRIP_TYPE_NOPS (value);
1627 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1628 return NULL;
1629 }
1630 }
1631
1632      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1633 in a block that we aren't copying during tree_function_versioning,
1634 just drop the clobber stmt. */
1635 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1636 {
1637 tree lhs = gimple_assign_lhs (stmt);
1638 if (TREE_CODE (lhs) == MEM_REF
1639 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1640 {
1641 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1642 if (gimple_bb (def_stmt)
1643 && !bitmap_bit_p (id->blocks_to_copy,
1644 gimple_bb (def_stmt)->index))
1645 return NULL;
1646 }
1647 }
1648
1649 if (gimple_debug_bind_p (stmt))
1650 {
1651 gdebug *copy
1652 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1653 gimple_debug_bind_get_value (stmt),
1654 stmt);
1655 if (id->reset_location)
1656 gimple_set_location (copy, input_location);
1657 id->debug_stmts.safe_push (copy);
1658 gimple_seq_add_stmt (&stmts, copy);
1659 return stmts;
1660 }
1661 if (gimple_debug_source_bind_p (stmt))
1662 {
1663 gdebug *copy = gimple_build_debug_source_bind
1664 (gimple_debug_source_bind_get_var (stmt),
1665 gimple_debug_source_bind_get_value (stmt),
1666 stmt);
1667 if (id->reset_location)
1668 gimple_set_location (copy, input_location);
1669 id->debug_stmts.safe_push (copy);
1670 gimple_seq_add_stmt (&stmts, copy);
1671 return stmts;
1672 }
1673 if (gimple_debug_nonbind_marker_p (stmt))
1674 {
1675 /* If the inlined function has too many debug markers,
1676 don't copy them. */
1677 if (id->src_cfun->debug_marker_count
1678 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1679 return stmts;
1680
1681 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1682 if (id->reset_location)
1683 gimple_set_location (copy, input_location);
1684 id->debug_stmts.safe_push (copy);
1685 gimple_seq_add_stmt (&stmts, copy);
1686 return stmts;
1687 }
1688
1689 /* Create a new deep copy of the statement. */
1690 copy = gimple_copy (stmt);
1691
1692 /* Clear flags that need revisiting. */
1693 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1694 {
1695 if (gimple_call_tail_p (call_stmt))
1696 gimple_call_set_tail (call_stmt, false);
1697 if (gimple_call_from_thunk_p (call_stmt))
1698 gimple_call_set_from_thunk (call_stmt, false);
1699 if (gimple_call_internal_p (call_stmt))
1700 switch (gimple_call_internal_fn (call_stmt))
1701 {
1702 case IFN_GOMP_SIMD_LANE:
1703 case IFN_GOMP_SIMD_VF:
1704 case IFN_GOMP_SIMD_LAST_LANE:
1705 case IFN_GOMP_SIMD_ORDERED_START:
1706 case IFN_GOMP_SIMD_ORDERED_END:
1707 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1708 break;
1709 default:
1710 break;
1711 }
1712 }
1713
1714 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1715 RESX and EH_DISPATCH. */
1716 if (id->eh_map)
1717 switch (gimple_code (copy))
1718 {
1719 case GIMPLE_CALL:
1720 {
1721 tree r, fndecl = gimple_call_fndecl (copy);
1722 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1723 switch (DECL_FUNCTION_CODE (fndecl))
1724 {
1725 case BUILT_IN_EH_COPY_VALUES:
1726 r = gimple_call_arg (copy, 1);
1727 r = remap_eh_region_tree_nr (r, id);
1728 gimple_call_set_arg (copy, 1, r);
1729 /* FALLTHRU */
1730
1731 case BUILT_IN_EH_POINTER:
1732 case BUILT_IN_EH_FILTER:
1733 r = gimple_call_arg (copy, 0);
1734 r = remap_eh_region_tree_nr (r, id);
1735 gimple_call_set_arg (copy, 0, r);
1736 break;
1737
1738 default:
1739 break;
1740 }
1741
1742 /* Reset alias info if we didn't apply measures to
1743 keep it valid over inlining by setting DECL_PT_UID. */
1744 if (!id->src_cfun->gimple_df
1745 || !id->src_cfun->gimple_df->ipa_pta)
1746 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1747 }
1748 break;
1749
1750 case GIMPLE_RESX:
1751 {
1752 gresx *resx_stmt = as_a <gresx *> (copy);
1753 int r = gimple_resx_region (resx_stmt);
1754 r = remap_eh_region_nr (r, id);
1755 gimple_resx_set_region (resx_stmt, r);
1756 }
1757 break;
1758
1759 case GIMPLE_EH_DISPATCH:
1760 {
1761 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1762 int r = gimple_eh_dispatch_region (eh_dispatch);
1763 r = remap_eh_region_nr (r, id);
1764 gimple_eh_dispatch_set_region (eh_dispatch, r);
1765 }
1766 break;
1767
1768 default:
1769 break;
1770 }
1771 }
1772
1773 /* If STMT has a block defined, map it to the newly constructed block. */
1774 if (gimple_block (copy))
1775 {
1776 tree *n;
1777 n = id->decl_map->get (gimple_block (copy));
1778 gcc_assert (n);
1779 gimple_set_block (copy, *n);
1780 }
1781
1782 if (id->reset_location)
1783 gimple_set_location (copy, input_location);
1784
1785 /* Debug statements ought to be rebuilt and not copied. */
1786 gcc_checking_assert (!is_gimple_debug (copy));
1787
1788 /* Remap all the operands in COPY. */
1789 memset (&wi, 0, sizeof (wi));
1790 wi.info = id;
1791 if (skip_first)
1792 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1793 else
1794 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1795
1796 /* Clear the copied virtual operands. We are not remapping them here
1797 but are going to recreate them from scratch. */
1798 if (gimple_has_mem_ops (copy))
1799 {
1800 gimple_set_vdef (copy, NULL_TREE);
1801 gimple_set_vuse (copy, NULL_TREE);
1802 }
1803
1804 gimple_seq_add_stmt (&stmts, copy);
1805 return stmts;
1806 }
1807
1808
1809 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1810 later */
1811
1812 static basic_block
1813 copy_bb (copy_body_data *id, basic_block bb,
1814 profile_count num, profile_count den)
1815 {
1816 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1817 basic_block copy_basic_block;
1818 tree decl;
1819 basic_block prev;
1820
1821 profile_count::adjust_for_ipa_scaling (&num, &den);
1822
1823 /* Search for previous copied basic block. */
1824 prev = bb->prev_bb;
1825 while (!prev->aux)
1826 prev = prev->prev_bb;
1827
1828 /* create_basic_block() will append every new block to
1829 basic_block_info automatically. */
1830 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1831 copy_basic_block->count = bb->count.apply_scale (num, den);
1832
1833 copy_gsi = gsi_start_bb (copy_basic_block);
1834
1835 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1836 {
1837 gimple_seq stmts;
1838 gimple *stmt = gsi_stmt (gsi);
1839 gimple *orig_stmt = stmt;
1840 gimple_stmt_iterator stmts_gsi;
1841 bool stmt_added = false;
1842
1843 id->regimplify = false;
1844 stmts = remap_gimple_stmt (stmt, id);
1845
1846 if (gimple_seq_empty_p (stmts))
1847 continue;
1848
1849 seq_gsi = copy_gsi;
1850
1851 for (stmts_gsi = gsi_start (stmts);
1852 !gsi_end_p (stmts_gsi); )
1853 {
1854 stmt = gsi_stmt (stmts_gsi);
1855
1856 /* Advance iterator now before stmt is moved to seq_gsi. */
1857 gsi_next (&stmts_gsi);
1858
1859 if (gimple_nop_p (stmt))
1860 continue;
1861
1862 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1863 orig_stmt);
1864
1865 /* With return slot optimization we can end up with
1866 non-gimple (foo *)&this->m; fix that here. */
1867 if (is_gimple_assign (stmt)
1868 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1869 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1870 {
1871 tree new_rhs;
1872 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1873 gimple_assign_rhs1 (stmt),
1874 true, NULL, false,
1875 GSI_CONTINUE_LINKING);
1876 gimple_assign_set_rhs1 (stmt, new_rhs);
1877 id->regimplify = false;
1878 }
1879
1880 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1881
1882 if (id->regimplify)
1883 gimple_regimplify_operands (stmt, &seq_gsi);
1884
1885 stmt_added = true;
1886 }
1887
1888 if (!stmt_added)
1889 continue;
1890
1891 /* If copy_basic_block has been empty at the start of this iteration,
1892 call gsi_start_bb again to get at the newly added statements. */
1893 if (gsi_end_p (copy_gsi))
1894 copy_gsi = gsi_start_bb (copy_basic_block);
1895 else
1896 gsi_next (&copy_gsi);
1897
1898 /* Process the new statement. The call to gimple_regimplify_operands
1899 may have turned the statement into multiple statements; we
1900 need to process all of them. */
1901 do
1902 {
1903 tree fn;
1904 gcall *call_stmt;
1905
1906 stmt = gsi_stmt (copy_gsi);
1907 call_stmt = dyn_cast <gcall *> (stmt);
1908 if (call_stmt
1909 && gimple_call_va_arg_pack_p (call_stmt)
1910 && id->call_stmt
1911 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1912 {
1913 /* __builtin_va_arg_pack () should be replaced by
1914 all arguments corresponding to ... in the caller. */
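/* Illustrative example (not from the sources): if the caller invokes the
   inlined function as bar (x, 1, 2) and bar declares only the named
   parameter X, then a callee statement
     foo (q, __builtin_va_arg_pack ());
   is rebuilt below as
     foo (q, 1, 2);
   i.e. the caller's trailing arguments are spliced in place of the
   __builtin_va_arg_pack () argument. */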
1915 tree p;
1916 gcall *new_call;
1917 vec<tree> argarray;
1918 size_t nargs = gimple_call_num_args (id->call_stmt);
1919 size_t n;
1920
1921 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1922 nargs--;
1923
1924 /* Create the new array of arguments. */
1925 n = nargs + gimple_call_num_args (call_stmt);
1926 argarray.create (n);
1927 argarray.safe_grow_cleared (n);
1928
1929 /* Copy all the arguments before '...' */
1930 memcpy (argarray.address (),
1931 gimple_call_arg_ptr (call_stmt, 0),
1932 gimple_call_num_args (call_stmt) * sizeof (tree));
1933
1934 /* Append the arguments passed in '...' */
1935 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1936 gimple_call_arg_ptr (id->call_stmt, 0)
1937 + (gimple_call_num_args (id->call_stmt) - nargs),
1938 nargs * sizeof (tree));
1939
1940 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1941 argarray);
1942
1943 argarray.release ();
1944
1945 /* Copy all GIMPLE_CALL flags, location and block, except
1946 GF_CALL_VA_ARG_PACK. */
1947 gimple_call_copy_flags (new_call, call_stmt);
1948 gimple_call_set_va_arg_pack (new_call, false);
1949 gimple_set_location (new_call, gimple_location (stmt));
1950 gimple_set_block (new_call, gimple_block (stmt));
1951 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1952
1953 gsi_replace (&copy_gsi, new_call, false);
1954 stmt = new_call;
1955 }
1956 else if (call_stmt
1957 && id->call_stmt
1958 && (decl = gimple_call_fndecl (stmt))
1959 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
1960 {
1961 /* __builtin_va_arg_pack_len () should be replaced by
1962 the number of anonymous arguments. */
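/* Illustration (assumed numbers): if the caller passes five arguments and
   the inlined function declares two named parameters, NARGS below becomes
   3. The call is then folded to the constant 3, or, when the caller
   itself uses __builtin_va_arg_pack (), rewritten as "lhs = tmp + 3" with
   the original __builtin_va_arg_pack_len call redirected to define tmp. */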
1963 size_t nargs = gimple_call_num_args (id->call_stmt);
1964 tree count, p;
1965 gimple *new_stmt;
1966
1967 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1968 nargs--;
1969
1970 if (!gimple_call_lhs (stmt))
1971 {
1972 /* Drop unused calls. */
1973 gsi_remove (&copy_gsi, false);
1974 continue;
1975 }
1976 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
1977 {
1978 count = build_int_cst (integer_type_node, nargs);
1979 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1980 gsi_replace (&copy_gsi, new_stmt, false);
1981 stmt = new_stmt;
1982 }
1983 else if (nargs != 0)
1984 {
1985 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
1986 count = build_int_cst (integer_type_node, nargs);
1987 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1988 PLUS_EXPR, newlhs, count);
1989 gimple_call_set_lhs (stmt, newlhs);
1990 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
1991 }
1992 }
1993 else if (call_stmt
1994 && id->call_stmt
1995 && gimple_call_internal_p (stmt)
1996 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1997 {
1998 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1999 gsi_remove (&copy_gsi, false);
2000 continue;
2001 }
2002
2003 /* Statements produced by inlining can be unfolded, especially
2004 when we have constant propagated some operands. We can't fold
2005 them right now for two reasons:
2006 1) folding requires SSA_NAME_DEF_STMTs to be correct
2007 2) we can't change function calls to builtins.
2008 So we just mark the statement for later folding. We mark
2009 all new statements, instead of just the statements that have changed
2010 by some nontrivial substitution, so even statements made
2011 foldable indirectly are updated. If this turns out to be
2012 expensive, copy_body can be told to watch for nontrivial
2013 changes. */
2014 if (id->statements_to_fold)
2015 id->statements_to_fold->add (stmt);
2016
2017 /* We're duplicating a CALL_EXPR. Find any corresponding
2018 callgraph edges and update or duplicate them. */
2019 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2020 {
2021 struct cgraph_edge *edge;
2022
2023 switch (id->transform_call_graph_edges)
2024 {
2025 case CB_CGE_DUPLICATE:
2026 edge = id->src_node->get_edge (orig_stmt);
2027 if (edge)
2028 {
2029 struct cgraph_edge *old_edge = edge;
2030 profile_count old_cnt = edge->count;
2031 edge = edge->clone (id->dst_node, call_stmt,
2032 gimple_uid (stmt),
2033 num, den,
2034 true);
2035
2036 /* Speculative calls consist of two edges - direct and
2037 indirect. Duplicate the whole thing and distribute
2038 frequencies accordingly. */
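/* The arithmetic below keeps the original ratio: the cloned indirect
   edge receives new_bb_count * indir_cnt / (old_cnt + indir_cnt) and the
   cloned direct edge gets the remainder, so the two again add up to the
   count of the copied block. */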
2039 if (edge->speculative)
2040 {
2041 struct cgraph_edge *direct, *indirect;
2042 struct ipa_ref *ref;
2043
2044 gcc_assert (!edge->indirect_unknown_callee);
2045 old_edge->speculative_call_info (direct, indirect, ref);
2046
2047 profile_count indir_cnt = indirect->count;
2048 indirect = indirect->clone (id->dst_node, call_stmt,
2049 gimple_uid (stmt),
2050 num, den,
2051 true);
2052
2053 profile_probability prob
2054 = indir_cnt.probability_in (old_cnt + indir_cnt);
2055 indirect->count
2056 = copy_basic_block->count.apply_probability (prob);
2057 edge->count = copy_basic_block->count - indirect->count;
2058 id->dst_node->clone_reference (ref, stmt);
2059 }
2060 else
2061 edge->count = copy_basic_block->count;
2062 }
2063 break;
2064
2065 case CB_CGE_MOVE_CLONES:
2066 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2067 call_stmt);
2068 edge = id->dst_node->get_edge (stmt);
2069 break;
2070
2071 case CB_CGE_MOVE:
2072 edge = id->dst_node->get_edge (orig_stmt);
2073 if (edge)
2074 edge->set_call_stmt (call_stmt);
2075 break;
2076
2077 default:
2078 gcc_unreachable ();
2079 }
2080
2081 /* Constant propagation on arguments done during inlining
2082 may create a new direct call. Produce an edge for it. */
2083 if ((!edge
2084 || (edge->indirect_inlining_edge
2085 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2086 && id->dst_node->definition
2087 && (fn = gimple_call_fndecl (stmt)) != NULL)
2088 {
2089 struct cgraph_node *dest = cgraph_node::get_create (fn);
2090
2091 /* We have a missing edge in the callgraph. This can happen
2092 when previous inlining turned an indirect call into a
2093 direct call by constant propagating arguments, or when we are
2094 producing a dead clone (for further cloning). In all
2095 other cases we hit a bug (incorrect node sharing is the
2096 most common reason for missing edges). */
2097 gcc_assert (!dest->definition
2098 || dest->address_taken
2099 || !id->src_node->definition
2100 || !id->dst_node->definition);
2101 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2102 id->dst_node->create_edge_including_clones
2103 (dest, orig_stmt, call_stmt, bb->count,
2104 CIF_ORIGINALLY_INDIRECT_CALL);
2105 else
2106 id->dst_node->create_edge (dest, call_stmt,
2107 bb->count)->inline_failed
2108 = CIF_ORIGINALLY_INDIRECT_CALL;
2109 if (dump_file)
2110 {
2111 fprintf (dump_file, "Created new direct edge to %s\n",
2112 dest->name ());
2113 }
2114 }
2115
2116 notice_special_calls (as_a <gcall *> (stmt));
2117 }
2118
2119 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2120 id->eh_map, id->eh_lp_nr);
2121
2122 gsi_next (&copy_gsi);
2123 }
2124 while (!gsi_end_p (copy_gsi));
2125
2126 copy_gsi = gsi_last_bb (copy_basic_block);
2127 }
2128
2129 return copy_basic_block;
2130 }
2131
2132 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2133 SSA form is quite easy, since the dominator relationship for the old
2134 basic blocks does not change.
2135
2136 There is, however, an exception: inlining might change the dominator
2137 relation across EH edges from basic blocks within inlined functions whose
2138 destinations are landing pads in the function we inline into.
2139
2140 The function fills in PHI_RESULTs of such PHI nodes if they refer
2141 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2142 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2143 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2144 set, and this means that there will be no overlapping live ranges
2145 for the underlying symbol.
2146
2147 This might change in the future if we allow redirecting of EH edges;
2148 we might then want to change the way we build the CFG pre-inlining to
2149 include all the possible edges. */
2150 static void
2151 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2152 bool can_throw, bool nonlocal_goto)
2153 {
2154 edge e;
2155 edge_iterator ei;
2156
2157 FOR_EACH_EDGE (e, ei, bb->succs)
2158 if (!e->dest->aux
2159 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2160 {
2161 gphi *phi;
2162 gphi_iterator si;
2163
2164 if (!nonlocal_goto)
2165 gcc_assert (e->flags & EDGE_EH);
2166
2167 if (!can_throw)
2168 gcc_assert (!(e->flags & EDGE_EH));
2169
2170 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2171 {
2172 edge re;
2173
2174 phi = si.phi ();
2175
2176 /* For abnormal goto/call edges the receiver can be the
2177 ENTRY_BLOCK. Do not assert this cannot happen. */
2178
2179 gcc_assert ((e->flags & EDGE_EH)
2180 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2181
2182 re = find_edge (ret_bb, e->dest);
2183 gcc_checking_assert (re);
2184 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2185 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2186
2187 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2188 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2189 }
2190 }
2191 }
2192
2193 /* Insert clobbers for automatic variables of the inlined function
2194 ID->src_fn at the start of basic block BB. */
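/* For each such variable VAR (mapped to a local of ID->dst_fn), the loop
   below emits, right after the labels of BB, a statement of the form
     VAR ={v} {CLOBBER};
   so later passes know the inlined variable's storage is dead on this
   EH path. */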
2195
2196 static void
2197 add_clobbers_to_eh_landing_pad (basic_block bb, copy_body_data *id)
2198 {
2199 tree var;
2200 unsigned int i;
2201 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2202 if (VAR_P (var)
2203 && !DECL_HARD_REGISTER (var)
2204 && !TREE_THIS_VOLATILE (var)
2205 && !DECL_HAS_VALUE_EXPR_P (var)
2206 && !is_gimple_reg (var)
2207 && auto_var_in_fn_p (var, id->src_fn)
2208 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2209 {
2210 tree *t = id->decl_map->get (var);
2211 if (!t)
2212 continue;
2213 tree new_var = *t;
2214 if (VAR_P (new_var)
2215 && !DECL_HARD_REGISTER (new_var)
2216 && !TREE_THIS_VOLATILE (new_var)
2217 && !DECL_HAS_VALUE_EXPR_P (new_var)
2218 && !is_gimple_reg (new_var)
2219 && auto_var_in_fn_p (new_var, id->dst_fn))
2220 {
2221 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2222 tree clobber = build_clobber (TREE_TYPE (new_var));
2223 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2224 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2225 }
2226 }
2227 }
2228
2229 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2230 accordingly. Assume the aux
2231 pointers point to the copies of each BB. Return true if any
2232 debug stmts are left after a statement that must end the basic block. */
2233
2234 static bool
2235 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2236 basic_block ret_bb, basic_block abnormal_goto_dest,
2237 copy_body_data *id)
2238 {
2239 basic_block new_bb = (basic_block) bb->aux;
2240 edge_iterator ei;
2241 edge old_edge;
2242 gimple_stmt_iterator si;
2243 bool need_debug_cleanup = false;
2244
2245 /* Use the indices from the original blocks to create edges for the
2246 new ones. */
2247 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2248 if (!(old_edge->flags & EDGE_EH))
2249 {
2250 edge new_edge;
2251 int flags = old_edge->flags;
2252 location_t locus = old_edge->goto_locus;
2253
2254 /* Return edges do get a FALLTHRU flag when they get inlined. */
2255 if (old_edge->dest->index == EXIT_BLOCK
2256 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2257 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2258 flags |= EDGE_FALLTHRU;
2259
2260 new_edge
2261 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2262 new_edge->probability = old_edge->probability;
2263 if (!id->reset_location)
2264 new_edge->goto_locus = remap_location (locus, id);
2265 }
2266
2267 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2268 return false;
2269
2270 /* When doing function splitting, we must decrease the count of the return
2271 block that was previously reachable from blocks we did not copy. */
2272 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2273 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2274 if (old_edge->src->index != ENTRY_BLOCK
2275 && !old_edge->src->aux)
2276 new_bb->count -= old_edge->count ().apply_scale (num, den);
2277
2278 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2279 {
2280 gimple *copy_stmt;
2281 bool can_throw, nonlocal_goto;
2282
2283 copy_stmt = gsi_stmt (si);
2284 if (!is_gimple_debug (copy_stmt))
2285 update_stmt (copy_stmt);
2286
2287 /* Do this before the possible split_block. */
2288 gsi_next (&si);
2289
2290 /* If this tree could throw an exception, there are two
2291 cases where we need to add abnormal edge(s): the
2292 tree wasn't in a region and there is a "current
2293 region" in the caller; or the original tree had
2294 EH edges. In both cases split the block after the tree,
2295 and add abnormal edge(s) as needed; we need both
2296 those from the callee and the caller.
2297 We check whether the copy can throw, because the const
2298 propagation can change an INDIRECT_REF which throws
2299 into a COMPONENT_REF which doesn't. If the copy
2300 can throw, the original could also throw. */
2301 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2302 nonlocal_goto
2303 = (stmt_can_make_abnormal_goto (copy_stmt)
2304 && !computed_goto_p (copy_stmt));
2305
2306 if (can_throw || nonlocal_goto)
2307 {
2308 if (!gsi_end_p (si))
2309 {
2310 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2311 gsi_next (&si);
2312 if (gsi_end_p (si))
2313 need_debug_cleanup = true;
2314 }
2315 if (!gsi_end_p (si))
2316 /* Note that bb's predecessor edges aren't necessarily
2317 right at this point; split_block doesn't care. */
2318 {
2319 edge e = split_block (new_bb, copy_stmt);
2320
2321 new_bb = e->dest;
2322 new_bb->aux = e->src->aux;
2323 si = gsi_start_bb (new_bb);
2324 }
2325 }
2326
2327 bool update_probs = false;
2328
2329 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2330 {
2331 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2332 update_probs = true;
2333 }
2334 else if (can_throw)
2335 {
2336 make_eh_edges (copy_stmt);
2337 update_probs = true;
2338 }
2339
2340 /* EH edges may not match old edges. Copy as much as possible. */
2341 if (update_probs)
2342 {
2343 edge e;
2344 edge_iterator ei;
2345 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2346
2347 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2348 if ((old_edge->flags & EDGE_EH)
2349 && (e = find_edge (copy_stmt_bb,
2350 (basic_block) old_edge->dest->aux))
2351 && (e->flags & EDGE_EH))
2352 e->probability = old_edge->probability;
2353
2354 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2355 if (e->flags & EDGE_EH)
2356 {
2357 if (!e->probability.initialized_p ())
2358 e->probability = profile_probability::never ();
2359 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2360 {
2361 add_clobbers_to_eh_landing_pad (e->dest, id);
2362 id->add_clobbers_to_eh_landing_pads = 0;
2363 }
2364 }
2365 }
2366
2367
2368 /* If the call we inline cannot make an abnormal goto, do not add
2369 additional abnormal edges but only retain those already present
2370 in the original function body. */
2371 if (abnormal_goto_dest == NULL)
2372 nonlocal_goto = false;
2373 if (nonlocal_goto)
2374 {
2375 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2376
2377 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2378 nonlocal_goto = false;
2379 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2380 in OpenMP regions which aren't allowed to be left abnormally.
2381 So, no need to add abnormal edge in that case. */
2382 else if (is_gimple_call (copy_stmt)
2383 && gimple_call_internal_p (copy_stmt)
2384 && (gimple_call_internal_fn (copy_stmt)
2385 == IFN_ABNORMAL_DISPATCHER)
2386 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2387 nonlocal_goto = false;
2388 else
2389 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2390 EDGE_ABNORMAL);
2391 }
2392
2393 if ((can_throw || nonlocal_goto)
2394 && gimple_in_ssa_p (cfun))
2395 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2396 can_throw, nonlocal_goto);
2397 }
2398 return need_debug_cleanup;
2399 }
2400
2401 /* Copy the PHIs. All blocks and edges have been copied; some blocks
2402 were possibly split and new outgoing EH edges inserted.
2403 BB points to the block of the original function and the AUX pointers link
2404 the original and newly copied blocks. */
2405
2406 static void
2407 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2408 {
2409 basic_block const new_bb = (basic_block) bb->aux;
2410 edge_iterator ei;
2411 gphi *phi;
2412 gphi_iterator si;
2413 edge new_edge;
2414 bool inserted = false;
2415
2416 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2417 {
2418 tree res, new_res;
2419 gphi *new_phi;
2420
2421 phi = si.phi ();
2422 res = PHI_RESULT (phi);
2423 new_res = res;
2424 if (!virtual_operand_p (res))
2425 {
2426 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2427 if (EDGE_COUNT (new_bb->preds) == 0)
2428 {
2429 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2430 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2431 }
2432 else
2433 {
2434 new_phi = create_phi_node (new_res, new_bb);
2435 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2436 {
2437 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2438 bb);
2439 tree arg;
2440 tree new_arg;
2441 edge_iterator ei2;
2442 location_t locus;
2443
2444 /* When doing partial cloning, we allow PHIs on the entry
2445 block as long as all the arguments are the same.
2446 Find any input edge to see the argument to copy. */
2447 if (!old_edge)
2448 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2449 if (!old_edge->src->aux)
2450 break;
2451
2452 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2453 new_arg = arg;
2454 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2455 gcc_assert (new_arg);
2456 /* With return slot optimization we can end up with
2457 non-gimple (foo *)&this->m; fix that here. */
2458 if (TREE_CODE (new_arg) != SSA_NAME
2459 && TREE_CODE (new_arg) != FUNCTION_DECL
2460 && !is_gimple_val (new_arg))
2461 {
2462 gimple_seq stmts = NULL;
2463 new_arg = force_gimple_operand (new_arg, &stmts, true,
2464 NULL);
2465 gsi_insert_seq_on_edge (new_edge, stmts);
2466 inserted = true;
2467 }
2468 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2469 if (id->reset_location)
2470 locus = input_location;
2471 else
2472 locus = remap_location (locus, id);
2473 add_phi_arg (new_phi, new_arg, new_edge, locus);
2474 }
2475 }
2476 }
2477 }
2478
2479 /* Commit the delayed edge insertions. */
2480 if (inserted)
2481 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2482 gsi_commit_one_edge_insert (new_edge, NULL);
2483 }
2484
2485
2486 /* Wrapper for remap_decl so it can be used as a callback. */
2487
2488 static tree
2489 remap_decl_1 (tree decl, void *data)
2490 {
2491 return remap_decl (decl, (copy_body_data *) data);
2492 }
2493
2494 /* Build the struct function and associated data structures for the new clone
2495 NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function changes
2496 cfun to the function of new_fndecl (and current_function_decl too). */
2497
2498 static void
2499 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2500 {
2501 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2502
2503 if (!DECL_ARGUMENTS (new_fndecl))
2504 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2505 if (!DECL_RESULT (new_fndecl))
2506 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2507
2508 /* Register specific tree functions. */
2509 gimple_register_cfg_hooks ();
2510
2511 /* Get clean struct function. */
2512 push_struct_function (new_fndecl);
2513
2514 /* We will rebuild these, so just sanity check that they are empty. */
2515 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2516 gcc_assert (cfun->local_decls == NULL);
2517 gcc_assert (cfun->cfg == NULL);
2518 gcc_assert (cfun->decl == new_fndecl);
2519
2520 /* Copy items we preserve during cloning. */
2521 cfun->static_chain_decl = src_cfun->static_chain_decl;
2522 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2523 cfun->function_end_locus = src_cfun->function_end_locus;
2524 cfun->curr_properties = src_cfun->curr_properties;
2525 cfun->last_verified = src_cfun->last_verified;
2526 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2527 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2528 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2529 cfun->stdarg = src_cfun->stdarg;
2530 cfun->after_inlining = src_cfun->after_inlining;
2531 cfun->can_throw_non_call_exceptions
2532 = src_cfun->can_throw_non_call_exceptions;
2533 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2534 cfun->returns_struct = src_cfun->returns_struct;
2535 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2536
2537 init_empty_tree_cfg ();
2538
2539 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2540
2541 profile_count num = count;
2542 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2543 profile_count::adjust_for_ipa_scaling (&num, &den);
2544
2545 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2546 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2547 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2548 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2549 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2550 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2551 if (src_cfun->eh)
2552 init_eh_for_function ();
2553
2554 if (src_cfun->gimple_df)
2555 {
2556 init_tree_ssa (cfun);
2557 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2558 if (cfun->gimple_df->in_ssa_p)
2559 init_ssa_operands (cfun);
2560 }
2561 }
2562
2563 /* Helper function for copy_cfg_body. Move debug stmts from the end
2564 of NEW_BB to the beginning of successor basic blocks when needed. If a
2565 successor has multiple predecessors, reset the values of the debug stmts;
2566 otherwise keep them. */
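/* A typical case (illustrative): NEW_BB was split after a throwing call,
   leaving trailing debug binds behind the statement that now has to end
   the block. For the last successor edge the binds are moved, for the
   other edges they are copied; when a successor has several predecessors
   the bound value is dropped so that no stale value is claimed on paths
   arriving from elsewhere. */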
2567
2568 static void
2569 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2570 {
2571 edge e;
2572 edge_iterator ei;
2573 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2574
2575 if (gsi_end_p (si)
2576 || gsi_one_before_end_p (si)
2577 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2578 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2579 return;
2580
2581 FOR_EACH_EDGE (e, ei, new_bb->succs)
2582 {
2583 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2584 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2585 while (is_gimple_debug (gsi_stmt (ssi)))
2586 {
2587 gimple *stmt = gsi_stmt (ssi);
2588 gdebug *new_stmt;
2589 tree var;
2590 tree value;
2591
2592 /* For the last edge move the debug stmts instead of copying
2593 them. */
2594 if (ei_one_before_end_p (ei))
2595 {
2596 si = ssi;
2597 gsi_prev (&ssi);
2598 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2599 {
2600 gimple_debug_bind_reset_value (stmt);
2601 gimple_set_location (stmt, UNKNOWN_LOCATION);
2602 }
2603 gsi_remove (&si, false);
2604 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2605 continue;
2606 }
2607
2608 if (gimple_debug_bind_p (stmt))
2609 {
2610 var = gimple_debug_bind_get_var (stmt);
2611 if (single_pred_p (e->dest))
2612 {
2613 value = gimple_debug_bind_get_value (stmt);
2614 value = unshare_expr (value);
2615 new_stmt = gimple_build_debug_bind (var, value, stmt);
2616 }
2617 else
2618 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2619 }
2620 else if (gimple_debug_source_bind_p (stmt))
2621 {
2622 var = gimple_debug_source_bind_get_var (stmt);
2623 value = gimple_debug_source_bind_get_value (stmt);
2624 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2625 }
2626 else if (gimple_debug_nonbind_marker_p (stmt))
2627 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2628 else
2629 gcc_unreachable ();
2630 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2631 id->debug_stmts.safe_push (new_stmt);
2632 gsi_prev (&ssi);
2633 }
2634 }
2635 }
2636
2637 /* Make a copy of the sub-loops of SRC_PARENT and place them
2638 as sub-loops of DEST_PARENT. */
2639
2640 static void
2641 copy_loops (copy_body_data *id,
2642 struct loop *dest_parent, struct loop *src_parent)
2643 {
2644 struct loop *src_loop = src_parent->inner;
2645 while (src_loop)
2646 {
2647 if (!id->blocks_to_copy
2648 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2649 {
2650 struct loop *dest_loop = alloc_loop ();
2651
2652 /* Assign the new loop its header and latch and associate
2653 those with the new loop. */
2654 dest_loop->header = (basic_block)src_loop->header->aux;
2655 dest_loop->header->loop_father = dest_loop;
2656 if (src_loop->latch != NULL)
2657 {
2658 dest_loop->latch = (basic_block)src_loop->latch->aux;
2659 dest_loop->latch->loop_father = dest_loop;
2660 }
2661
2662 /* Copy loop meta-data. */
2663 copy_loop_info (src_loop, dest_loop);
2664
2665 /* Finally place it into the loop array and the loop tree. */
2666 place_new_loop (cfun, dest_loop);
2667 flow_loop_tree_node_add (dest_parent, dest_loop);
2668
2669 dest_loop->safelen = src_loop->safelen;
2670 if (src_loop->unroll)
2671 {
2672 dest_loop->unroll = src_loop->unroll;
2673 cfun->has_unroll = true;
2674 }
2675 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2676 if (src_loop->force_vectorize)
2677 {
2678 dest_loop->force_vectorize = true;
2679 cfun->has_force_vectorize_loops = true;
2680 }
2681 if (src_loop->simduid)
2682 {
2683 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2684 cfun->has_simduid_loops = true;
2685 }
2686
2687 /* Recurse. */
2688 copy_loops (id, dest_loop, src_loop);
2689 }
2690 src_loop = src_loop->next;
2691 }
2692 }
2693
2694 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2695
2696 void
2697 redirect_all_calls (copy_body_data * id, basic_block bb)
2698 {
2699 gimple_stmt_iterator si;
2700 gimple *last = last_stmt (bb);
2701 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2702 {
2703 gimple *stmt = gsi_stmt (si);
2704 if (is_gimple_call (stmt))
2705 {
2706 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2707 if (edge)
2708 {
2709 edge->redirect_call_stmt_to_callee ();
2710 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2711 gimple_purge_dead_eh_edges (bb);
2712 }
2713 }
2714 }
2715 }
2716
2717 /* Make a copy of the body of FN so that it can be inserted inline in
2718 another function. Walks FN via CFG, returns new fndecl. */
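/* In outline: the blocks selected by ID->blocks_to_copy are duplicated with
   copy_bb, their edges are recreated with copy_edges_for_bb, the loop tree
   and the PHI nodes are copied, call graph edges are redirected, and the
   aux pointers used for the old-to-new block mapping are cleared again. */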
2719
2720 static tree
2721 copy_cfg_body (copy_body_data * id,
2722 basic_block entry_block_map, basic_block exit_block_map,
2723 basic_block new_entry)
2724 {
2725 tree callee_fndecl = id->src_fn;
2726 /* Original cfun for the callee, doesn't change. */
2727 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2728 struct function *cfun_to_copy;
2729 basic_block bb;
2730 tree new_fndecl = NULL;
2731 bool need_debug_cleanup = false;
2732 int last;
2733 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2734 profile_count num = entry_block_map->count;
2735
2736 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2737
2738 /* Register specific tree functions. */
2739 gimple_register_cfg_hooks ();
2740
2741 /* If we are inlining just a region of the function, make sure to connect
2742 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2743 part of a loop, we must compute the frequency and probability of
2744 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2745 probabilities of edges incoming from the nonduplicated region. */
2746 if (new_entry)
2747 {
2748 edge e;
2749 edge_iterator ei;
2750 den = profile_count::zero ();
2751
2752 FOR_EACH_EDGE (e, ei, new_entry->preds)
2753 if (!e->src->aux)
2754 den += e->count ();
2755 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2756 }
2757
2758 profile_count::adjust_for_ipa_scaling (&num, &den);
2759
2760 /* Must have a CFG here at this point. */
2761 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2762 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2763
2764
2765 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2766 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2767 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2768 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2769
2770 /* Duplicate any exception-handling regions. */
2771 if (cfun->eh)
2772 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2773 remap_decl_1, id);
2774
2775 /* Use aux pointers to map the original blocks to their copies. */
2776 FOR_EACH_BB_FN (bb, cfun_to_copy)
2777 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2778 {
2779 basic_block new_bb = copy_bb (id, bb, num, den);
2780 bb->aux = new_bb;
2781 new_bb->aux = bb;
2782 new_bb->loop_father = entry_block_map->loop_father;
2783 }
2784
2785 last = last_basic_block_for_fn (cfun);
2786
2787 /* Now that we've duplicated the blocks, duplicate their edges. */
2788 basic_block abnormal_goto_dest = NULL;
2789 if (id->call_stmt
2790 && stmt_can_make_abnormal_goto (id->call_stmt))
2791 {
2792 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2793
2794 bb = gimple_bb (id->call_stmt);
2795 gsi_next (&gsi);
2796 if (gsi_end_p (gsi))
2797 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2798 }
2799 FOR_ALL_BB_FN (bb, cfun_to_copy)
2800 if (!id->blocks_to_copy
2801 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2802 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2803 abnormal_goto_dest, id);
2804
2805 if (new_entry)
2806 {
2807 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2808 EDGE_FALLTHRU);
2809 e->probability = profile_probability::always ();
2810 }
2811
2812 /* Duplicate the loop tree, if available and wanted. */
2813 if (loops_for_fn (src_cfun) != NULL
2814 && current_loops != NULL)
2815 {
2816 copy_loops (id, entry_block_map->loop_father,
2817 get_loop (src_cfun, 0));
2818 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2819 loops_state_set (LOOPS_NEED_FIXUP);
2820 }
2821
2822 /* If the loop tree in the source function needed fixup, mark the
2823 destination loop tree for fixup, too. */
2824 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2825 loops_state_set (LOOPS_NEED_FIXUP);
2826
2827 if (gimple_in_ssa_p (cfun))
2828 FOR_ALL_BB_FN (bb, cfun_to_copy)
2829 if (!id->blocks_to_copy
2830 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2831 copy_phis_for_bb (bb, id);
2832
2833 FOR_ALL_BB_FN (bb, cfun_to_copy)
2834 if (bb->aux)
2835 {
2836 if (need_debug_cleanup
2837 && bb->index != ENTRY_BLOCK
2838 && bb->index != EXIT_BLOCK)
2839 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2840 /* Update call edge destinations. This cannot be done before loop
2841 info is updated, because we may split basic blocks. */
2842 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2843 && bb->index != ENTRY_BLOCK
2844 && bb->index != EXIT_BLOCK)
2845 redirect_all_calls (id, (basic_block)bb->aux);
2846 ((basic_block)bb->aux)->aux = NULL;
2847 bb->aux = NULL;
2848 }
2849
2850 /* Zero out AUX fields of blocks newly created during EH edge
2851 insertion. */
2852 for (; last < last_basic_block_for_fn (cfun); last++)
2853 {
2854 if (need_debug_cleanup)
2855 maybe_move_debug_stmts_to_successors (id,
2856 BASIC_BLOCK_FOR_FN (cfun, last));
2857 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2858 /* Update call edge destinations. This cannot be done before loop
2859 info is updated, because we may split basic blocks. */
2860 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2861 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2862 }
2863 entry_block_map->aux = NULL;
2864 exit_block_map->aux = NULL;
2865
2866 if (id->eh_map)
2867 {
2868 delete id->eh_map;
2869 id->eh_map = NULL;
2870 }
2871 if (id->dependence_map)
2872 {
2873 delete id->dependence_map;
2874 id->dependence_map = NULL;
2875 }
2876
2877 return new_fndecl;
2878 }
2879
2880 /* Copy the debug STMT using ID. We deal with these statements in a
2881 special way: if any variable in their VALUE expression wasn't
2882 remapped yet, we won't remap it, because that would get decl uids
2883 out of sync, causing codegen differences between -g and -g0. If
2884 this arises, we drop the VALUE expression altogether. */
2885
2886 static void
2887 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2888 {
2889 tree t, *n;
2890 struct walk_stmt_info wi;
2891
2892 if (gimple_block (stmt))
2893 {
2894 n = id->decl_map->get (gimple_block (stmt));
2895 gimple_set_block (stmt, n ? *n : id->block);
2896 }
2897
2898 if (gimple_debug_nonbind_marker_p (stmt))
2899 return;
2900
2901 /* Remap all the operands in COPY. */
2902 memset (&wi, 0, sizeof (wi));
2903 wi.info = id;
2904
2905 processing_debug_stmt = 1;
2906
2907 if (gimple_debug_source_bind_p (stmt))
2908 t = gimple_debug_source_bind_get_var (stmt);
2909 else if (gimple_debug_bind_p (stmt))
2910 t = gimple_debug_bind_get_var (stmt);
2911 else
2912 gcc_unreachable ();
2913
2914 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2915 && (n = id->debug_map->get (t)))
2916 {
2917 gcc_assert (VAR_P (*n));
2918 t = *n;
2919 }
2920 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2921 /* T is a non-localized variable. */;
2922 else
2923 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2924
2925 if (gimple_debug_bind_p (stmt))
2926 {
2927 gimple_debug_bind_set_var (stmt, t);
2928
2929 if (gimple_debug_bind_has_value_p (stmt))
2930 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2931 remap_gimple_op_r, &wi, NULL);
2932
2933 /* Punt if any decl couldn't be remapped. */
2934 if (processing_debug_stmt < 0)
2935 gimple_debug_bind_reset_value (stmt);
2936 }
2937 else if (gimple_debug_source_bind_p (stmt))
2938 {
2939 gimple_debug_source_bind_set_var (stmt, t);
2940 /* When inlining, if the source bind refers to one of the optimized
2941 away parameters, change the source bind into a normal debug bind
2942 referring to the corresponding DEBUG_EXPR_DECL that should have
2943 been bound before the call stmt. */
2944 t = gimple_debug_source_bind_get_value (stmt);
2945 if (t != NULL_TREE
2946 && TREE_CODE (t) == PARM_DECL
2947 && id->call_stmt)
2948 {
2949 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2950 unsigned int i;
2951 if (debug_args != NULL)
2952 {
2953 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2954 if ((**debug_args)[i] == DECL_ORIGIN (t)
2955 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2956 {
2957 t = (**debug_args)[i + 1];
2958 stmt->subcode = GIMPLE_DEBUG_BIND;
2959 gimple_debug_bind_set_value (stmt, t);
2960 break;
2961 }
2962 }
2963 }
2964 if (gimple_debug_source_bind_p (stmt))
2965 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2966 remap_gimple_op_r, &wi, NULL);
2967 }
2968
2969 processing_debug_stmt = 0;
2970
2971 update_stmt (stmt);
2972 }
2973
2974 /* Process deferred debug stmts. In order to give values better odds
2975 of being successfully remapped, we delay the processing of debug
2976 stmts until all other stmts that might require remapping are
2977 processed. */
2978
2979 static void
2980 copy_debug_stmts (copy_body_data *id)
2981 {
2982 size_t i;
2983 gdebug *stmt;
2984
2985 if (!id->debug_stmts.exists ())
2986 return;
2987
2988 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2989 copy_debug_stmt (stmt, id);
2990
2991 id->debug_stmts.release ();
2992 }
2993
2994 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2995 another function. */
2996
2997 static tree
2998 copy_tree_body (copy_body_data *id)
2999 {
3000 tree fndecl = id->src_fn;
3001 tree body = DECL_SAVED_TREE (fndecl);
3002
3003 walk_tree (&body, copy_tree_body_r, id, NULL);
3004
3005 return body;
3006 }
3007
3008 /* Make a copy of the body of FN so that it can be inserted inline in
3009 another function. */
3010
3011 static tree
3012 copy_body (copy_body_data *id,
3013 basic_block entry_block_map, basic_block exit_block_map,
3014 basic_block new_entry)
3015 {
3016 tree fndecl = id->src_fn;
3017 tree body;
3018
3019 /* If this body has a CFG, walk CFG and copy. */
3020 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3021 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3022 new_entry);
3023 copy_debug_stmts (id);
3024
3025 return body;
3026 }
3027
3028 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3029 defined in function FN, or of a data member thereof. */
3030
3031 static bool
3032 self_inlining_addr_expr (tree value, tree fn)
3033 {
3034 tree var;
3035
3036 if (TREE_CODE (value) != ADDR_EXPR)
3037 return false;
3038
3039 var = get_base_address (TREE_OPERAND (value, 0));
3040
3041 return var && auto_var_in_fn_p (var, fn);
3042 }
3043
3044 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3045 lexical block and line number information from base_stmt, if given,
3046 or from the last stmt of the block otherwise. */
3047
3048 static gimple *
3049 insert_init_debug_bind (copy_body_data *id,
3050 basic_block bb, tree var, tree value,
3051 gimple *base_stmt)
3052 {
3053 gimple *note;
3054 gimple_stmt_iterator gsi;
3055 tree tracked_var;
3056
3057 if (!gimple_in_ssa_p (id->src_cfun))
3058 return NULL;
3059
3060 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3061 return NULL;
3062
3063 tracked_var = target_for_debug_bind (var);
3064 if (!tracked_var)
3065 return NULL;
3066
3067 if (bb)
3068 {
3069 gsi = gsi_last_bb (bb);
3070 if (!base_stmt && !gsi_end_p (gsi))
3071 base_stmt = gsi_stmt (gsi);
3072 }
3073
3074 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3075
3076 if (bb)
3077 {
3078 if (!gsi_end_p (gsi))
3079 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3080 else
3081 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3082 }
3083
3084 return note;
3085 }
3086
3087 static void
3088 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3089 {
3090 /* If VAR represents a zero-sized variable, it's possible that the
3091 assignment statement may result in no gimple statements. */
3092 if (init_stmt)
3093 {
3094 gimple_stmt_iterator si = gsi_last_bb (bb);
3095
3096 /* We can end up with init statements that store to a non-register
3097 from a rhs with a conversion. Handle that here by forcing the
3098 rhs into a temporary. gimple_regimplify_operands is not
3099 prepared to do this for us. */
3100 if (!is_gimple_debug (init_stmt)
3101 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3102 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3103 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3104 {
3105 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3106 gimple_expr_type (init_stmt),
3107 gimple_assign_rhs1 (init_stmt));
3108 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3109 GSI_NEW_STMT);
3110 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3111 gimple_assign_set_rhs1 (init_stmt, rhs);
3112 }
3113 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3114 gimple_regimplify_operands (init_stmt, &si);
3115
3116 if (!is_gimple_debug (init_stmt))
3117 {
3118 tree def = gimple_assign_lhs (init_stmt);
3119 insert_init_debug_bind (id, bb, def, def, init_stmt);
3120 }
3121 }
3122 }
3123
3124 /* Initialize parameter P with VALUE. If needed, produce an init statement
3125 at the end of BB. When BB is NULL, we return the init statement to be
3126 output later. */
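/* In rough outline: a read-only, non-addressable parameter whose argument
   is an invariant may simply be mapped to the value; otherwise a local
   VAR_DECL replaces P and either the SSA default definition is remapped
   to the value or an explicit initialization statement is built and, when
   BB is given, inserted at its end. */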
3127 static gimple *
3128 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3129 basic_block bb, tree *vars)
3130 {
3131 gimple *init_stmt = NULL;
3132 tree var;
3133 tree rhs = value;
3134 tree def = (gimple_in_ssa_p (cfun)
3135 ? ssa_default_def (id->src_cfun, p) : NULL);
3136
3137 if (value
3138 && value != error_mark_node
3139 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3140 {
3141 /* If we can match up types by promotion/demotion do so. */
3142 if (fold_convertible_p (TREE_TYPE (p), value))
3143 rhs = fold_convert (TREE_TYPE (p), value);
3144 else
3145 {
3146 /* ??? For valid programs we should not end up here.
3147 Still if we end up with truly mismatched types here, fall back
3148 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3149 GIMPLE to the following passes. */
3150 if (!is_gimple_reg_type (TREE_TYPE (value))
3151 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3152 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3153 else
3154 rhs = build_zero_cst (TREE_TYPE (p));
3155 }
3156 }
3157
3158 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3159 here since the type of this decl must be visible to the calling
3160 function. */
3161 var = copy_decl_to_var (p, id);
3162
3163 /* Declare this new variable. */
3164 DECL_CHAIN (var) = *vars;
3165 *vars = var;
3166
3167 /* Make gimplifier happy about this variable. */
3168 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3169
3170 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3171 we would not need to create a new variable here at all if it
3172 weren't for debug info. Still, we can just use the argument
3173 value. */
3174 if (TREE_READONLY (p)
3175 && !TREE_ADDRESSABLE (p)
3176 && value && !TREE_SIDE_EFFECTS (value)
3177 && !def)
3178 {
3179 /* We may produce non-gimple trees by adding NOPs or introduce
3180 invalid sharing when the operand is not really constant.
3181 It is not a big deal to prohibit constant propagation here, as
3182 we will constant propagate in the DOM1 pass anyway. */
3183 if (is_gimple_min_invariant (value)
3184 && useless_type_conversion_p (TREE_TYPE (p),
3185 TREE_TYPE (value))
3186 /* We have to be very careful about ADDR_EXPR. Make sure
3187 the base variable isn't a local variable of the inlined
3188 function, e.g., when doing recursive inlining, direct or
3189 mutually-recursive or whatever, which is why we don't
3190 just test whether fn == current_function_decl. */
3191 && ! self_inlining_addr_expr (value, fn))
3192 {
3193 insert_decl_map (id, p, value);
3194 insert_debug_decl_map (id, p, var);
3195 return insert_init_debug_bind (id, bb, var, value, NULL);
3196 }
3197 }
3198
3199 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3200 that way, when the PARM_DECL is encountered, it will be
3201 automatically replaced by the VAR_DECL. */
3202 insert_decl_map (id, p, var);
3203
3204 /* Even if P was TREE_READONLY, the new VAR should not be.
3205 In the original code, we would have constructed a
3206 temporary, and then the function body would have never
3207 changed the value of P. However, now, we will be
3208 constructing VAR directly. The constructor body may
3209 change its value multiple times as it is being
3210 constructed. Therefore, it must not be TREE_READONLY;
3211 the back-end assumes that a TREE_READONLY variable is
3212 assigned to only once. */
3213 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3214 TREE_READONLY (var) = 0;
3215
3216 /* If there is no setup required and we are in SSA, take the easy route
3217 replacing all SSA names representing the function parameter by the
3218 SSA name passed to the function.
3219
3220 We need to construct a map for the variable anyway, as it might be used
3221 in different SSA names when the parameter is set in the function.
3222
3223 Do the replacement at -O0 for const arguments replaced by a constant.
3224 This is important for builtin_constant_p and other constructs requiring
3225 a constant argument to be visible in the inlined function body. */
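/* Hypothetical example: when inlining foo (5) where the callee has a
   const-qualified parameter p and tests __builtin_constant_p (p),
   mapping the default definition of p directly to the constant 5 lets
   the builtin fold to 1 inside the inlined body even at -O0. */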
3226 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3227 && (optimize
3228 || (TREE_READONLY (p)
3229 && is_gimple_min_invariant (rhs)))
3230 && (TREE_CODE (rhs) == SSA_NAME
3231 || is_gimple_min_invariant (rhs))
3232 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3233 {
3234 insert_decl_map (id, def, rhs);
3235 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3236 }
3237
3238 /* If the value of the argument is never used, don't bother initializing
3239 it. */
3240 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3241 {
3242 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3243 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3244 }
3245
3246 /* Initialize this VAR_DECL from the equivalent argument. Convert
3247 the argument to the proper type in case it was promoted. */
3248 if (value)
3249 {
3250 if (rhs == error_mark_node)
3251 {
3252 insert_decl_map (id, p, var);
3253 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3254 }
3255
3256 STRIP_USELESS_TYPE_CONVERSION (rhs);
3257
3258 /* If we are in SSA form, properly remap the default definition,
3259 or assign to a dummy SSA name if the parameter is unused and
3260 we are not optimizing. */
3261 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3262 {
3263 if (def)
3264 {
3265 def = remap_ssa_name (def, id);
3266 init_stmt = gimple_build_assign (def, rhs);
3267 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3268 set_ssa_default_def (cfun, var, NULL);
3269 }
3270 else if (!optimize)
3271 {
3272 def = make_ssa_name (var);
3273 init_stmt = gimple_build_assign (def, rhs);
3274 }
3275 }
3276 else
3277 init_stmt = gimple_build_assign (var, rhs);
3278
3279 if (bb && init_stmt)
3280 insert_init_stmt (id, bb, init_stmt);
3281 }
3282 return init_stmt;
3283 }
3284
3285 /* Generate code to initialize the parameters of the function at the
3286 top of the stack in ID from the GIMPLE_CALL STMT. */
3287
3288 static void
3289 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3290 tree fn, basic_block bb)
3291 {
3292 tree parms;
3293 size_t i;
3294 tree p;
3295 tree vars = NULL_TREE;
3296 tree static_chain = gimple_call_chain (stmt);
3297
3298 /* Figure out what the parameters are. */
3299 parms = DECL_ARGUMENTS (fn);
3300
3301 /* Loop through the parameter declarations, replacing each with an
3302 equivalent VAR_DECL, appropriately initialized. */
3303 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3304 {
3305 tree val;
3306 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3307 setup_one_parameter (id, p, val, fn, bb, &vars);
3308 }
3309 /* After remapping parameters remap their types. This has to be done
3310 in a second loop over all parameters to appropriately remap
3311 variable sized arrays when the size is specified in a
3312 parameter following the array. */
3313 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3314 {
3315 tree *varp = id->decl_map->get (p);
3316 if (varp && VAR_P (*varp))
3317 {
3318 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3319 ? ssa_default_def (id->src_cfun, p) : NULL);
3320 tree var = *varp;
3321 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3322 /* Also remap the default definition if it was remapped
3323 to the default definition of the parameter replacement
3324 by the parameter setup. */
3325 if (def)
3326 {
3327 tree *defp = id->decl_map->get (def);
3328 if (defp
3329 && TREE_CODE (*defp) == SSA_NAME
3330 && SSA_NAME_VAR (*defp) == var)
3331 TREE_TYPE (*defp) = TREE_TYPE (var);
3332 }
3333 }
3334 }
3335
3336 /* Initialize the static chain. */
3337 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3338 gcc_assert (fn != current_function_decl);
3339 if (p)
3340 {
3341 /* No static chain? Seems like a bug in tree-nested.c. */
3342 gcc_assert (static_chain);
3343
3344 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3345 }
3346
3347 declare_inline_vars (id->block, vars);
3348 }
3349
3350
3351 /* Declare a return variable to replace the RESULT_DECL for the
3352 function we are calling. An appropriate DECL_STMT is returned.
3353 The USE_STMT is filled to contain a use of the declaration to
3354 indicate the return value of the function.
3355
3356 RETURN_SLOT, if non-null, is the place where to store the result. It
3357 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3358 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3359
3360 The return value is a (possibly null) value that holds the result
3361 as seen by the caller. */
3362
3363 static tree
3364 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3365 basic_block entry_bb)
3366 {
3367 tree callee = id->src_fn;
3368 tree result = DECL_RESULT (callee);
3369 tree callee_type = TREE_TYPE (result);
3370 tree caller_type;
3371 tree var, use;
3372
3373 /* Handle type-mismatches in the function declaration return type
3374 vs. the call expression. */
3375 if (modify_dest)
3376 caller_type = TREE_TYPE (modify_dest);
3377 else
3378 caller_type = TREE_TYPE (TREE_TYPE (callee));
3379
3380 /* We don't need to do anything for functions that don't return anything. */
3381 if (VOID_TYPE_P (callee_type))
3382 return NULL_TREE;
3383
3384 /* If there was a return slot, then the return value is the
3385 dereferenced address of that object. */
3386 if (return_slot)
3387 {
3388 /* The front end shouldn't have used both return_slot and
3389 a modify expression. */
3390 gcc_assert (!modify_dest);
3391 if (DECL_BY_REFERENCE (result))
3392 {
3393 tree return_slot_addr = build_fold_addr_expr (return_slot);
3394 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3395
3396 /* We are going to construct *&return_slot and we can't do that
3397 for variables believed to be not addressable.
3398
3399 FIXME: This check can possibly trigger, because values returned
3400 via return slot optimization are not believed to have their address
3401 taken by alias analysis. */
3402 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3403 var = return_slot_addr;
3404 }
3405 else
3406 {
3407 var = return_slot;
3408 gcc_assert (TREE_CODE (var) != SSA_NAME);
3409 if (TREE_ADDRESSABLE (result))
3410 mark_addressable (var);
3411 }
3412 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3413 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3414 && !DECL_GIMPLE_REG_P (result)
3415 && DECL_P (var))
3416 DECL_GIMPLE_REG_P (var) = 0;
3417 use = NULL;
3418 goto done;
3419 }
3420
3421 /* All types requiring non-trivial constructors should have been handled. */
3422 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3423
3424 /* Attempt to avoid creating a new temporary variable. */
3425 if (modify_dest
3426 && TREE_CODE (modify_dest) != SSA_NAME)
3427 {
3428 bool use_it = false;
3429
3430 /* We can't use MODIFY_DEST if there's type promotion involved. */
3431 if (!useless_type_conversion_p (callee_type, caller_type))
3432 use_it = false;
3433
3434 /* ??? If we're assigning to a variable sized type, then we must
3435 reuse the destination variable, because we've no good way to
3436 create variable sized temporaries at this point. */
3437 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3438 use_it = true;
3439
3440 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3441 reuse it as the result of the call directly. Don't do this if
3442 it would promote MODIFY_DEST to addressable. */
3443 else if (TREE_ADDRESSABLE (result))
3444 use_it = false;
3445 else
3446 {
3447 tree base_m = get_base_address (modify_dest);
3448
3449 /* If the base isn't a decl, then it's a pointer, and we don't
3450 know where that's going to go. */
3451 if (!DECL_P (base_m))
3452 use_it = false;
3453 else if (is_global_var (base_m))
3454 use_it = false;
3455 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3456 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3457 && !DECL_GIMPLE_REG_P (result)
3458 && DECL_GIMPLE_REG_P (base_m))
3459 use_it = false;
3460 else if (!TREE_ADDRESSABLE (base_m))
3461 use_it = true;
3462 }
3463
3464 if (use_it)
3465 {
3466 var = modify_dest;
3467 use = NULL;
3468 goto done;
3469 }
3470 }
3471
3472 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3473
3474 var = copy_result_decl_to_var (result, id);
3475 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3476
3477 /* Do not have the rest of GCC warn about this variable as it should
3478 not be visible to the user. */
3479 TREE_NO_WARNING (var) = 1;
3480
3481 declare_inline_vars (id->block, var);
3482
3483 /* Build the use expr. If the return type of the function was
3484 promoted, convert it back to the expected type. */
3485 use = var;
3486 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3487 {
3488 /* If we can match up types by promotion/demotion do so. */
3489 if (fold_convertible_p (caller_type, var))
3490 use = fold_convert (caller_type, var);
3491 else
3492 {
3493 /* ??? For valid programs we should not end up here.
3494 Still, if we do end up with truly mismatched types here, fall back
3495 to using a MEM_REF so as not to leak invalid GIMPLE to the
3496 following passes. */
3497 /* Prevent var from being written into SSA form. */
3498 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3499 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3500 DECL_GIMPLE_REG_P (var) = false;
3501 else if (is_gimple_reg_type (TREE_TYPE (var)))
3502 TREE_ADDRESSABLE (var) = true;
3503 use = fold_build2 (MEM_REF, caller_type,
3504 build_fold_addr_expr (var),
3505 build_int_cst (ptr_type_node, 0));
3506 }
3507 }
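/* Two hypothetical outcomes of the conversion above, for illustration: if
   the callee returns `int' while the caller expects `long', the types are
   promotion-compatible and USE becomes `(long) var'; if the types are truly
   incompatible, VAR is kept in memory and USE becomes a MEM_REF view of its
   address instead, so no invalid GIMPLE conversion is emitted.  */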
3508
3509 STRIP_USELESS_TYPE_CONVERSION (use);
3510
3511 if (DECL_BY_REFERENCE (result))
3512 {
3513 TREE_ADDRESSABLE (var) = 1;
3514 var = build_fold_addr_expr (var);
3515 }
3516
3517 done:
3518 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3519 way, when the RESULT_DECL is encountered, it will be
3520 automatically replaced by the VAR_DECL.
3521
3522 When returning by reference, ensure that RESULT_DECL remaps to
3523 gimple_val. */
3524 if (DECL_BY_REFERENCE (result)
3525 && !is_gimple_val (var))
3526 {
3527 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3528 insert_decl_map (id, result, temp);
3529 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3530 its default_def SSA_NAME. */
3531 if (gimple_in_ssa_p (id->src_cfun)
3532 && is_gimple_reg (result))
3533 {
3534 temp = make_ssa_name (temp);
3535 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3536 }
3537 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3538 }
3539 else
3540 insert_decl_map (id, result, var);
3541
3542 /* Remember this so we can ignore it in remap_decls. */
3543 id->retvar = var;
3544 return use;
3545 }
3546
3547 /* Determine if the function can be copied. If so, return NULL. If
3548 not, return a string describing the reason for failure. */
3549
3550 const char *
3551 copy_forbidden (struct function *fun)
3552 {
3553 const char *reason = fun->cannot_be_copied_reason;
3554
3555 /* Only examine the function once. */
3556 if (fun->cannot_be_copied_set)
3557 return reason;
3558
3559 /* We cannot copy a function that receives a non-local goto
3560 because we cannot remap the destination label used in the
3561 function that is performing the non-local goto. */
3562 /* ??? Actually, this should be possible, if we work at it.
3563 No doubt there's just a handful of places that simply
3564 assume it doesn't happen and don't substitute properly. */
3565 if (fun->has_nonlocal_label)
3566 {
3567 reason = G_("function %q+F can never be copied "
3568 "because it receives a non-local goto");
3569 goto fail;
3570 }
3571
3572 if (fun->has_forced_label_in_static)
3573 {
3574 reason = G_("function %q+F can never be copied because it saves "
3575 "address of local label in a static variable");
3576 goto fail;
3577 }
3578
3579 fail:
3580 fun->cannot_be_copied_reason = reason;
3581 fun->cannot_be_copied_set = true;
3582 return reason;
3583 }
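/* A hypothetical example of the first restriction above, using GNU C nested
   functions:

     void outer (void)
     {
       __label__ out;
       void inner (void) { goto out; }  /* non-local goto into outer  */
       inner ();
     out:;
     }

   Here `outer' receives a non-local goto, so copy_forbidden refuses to copy
   (and therefore to inline, clone or version) it.  */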
3584
3585
3586 static const char *inline_forbidden_reason;
3587
3588 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3589 iff a function cannot be inlined. Also sets the reason why. */
3590
3591 static tree
3592 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3593 struct walk_stmt_info *wip)
3594 {
3595 tree fn = (tree) wip->info;
3596 tree t;
3597 gimple *stmt = gsi_stmt (*gsi);
3598
3599 switch (gimple_code (stmt))
3600 {
3601 case GIMPLE_CALL:
3602 /* Refuse to inline an alloca call unless the user explicitly forced
3603 it, as this may change the program's memory overhead drastically
3604 when the function using alloca is called in a loop. In the GCC
3605 benchmark present in SPEC2000, inlining into schedule_block caused
3606 it to require 2GB of RAM instead of 256MB. Don't do so for alloca
3607 calls emitted for VLA objects as those can't cause unbounded growth
3608 (they're always wrapped inside stack_save/stack_restore regions). */
3609 if (gimple_maybe_alloca_call_p (stmt)
3610 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3611 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3612 {
3613 inline_forbidden_reason
3614 = G_("function %q+F can never be inlined because it uses "
3615 "alloca (override using the always_inline attribute)");
3616 *handled_ops_p = true;
3617 return fn;
3618 }
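/* For illustration, a hypothetical callee that trips the check above
   (`process' is a placeholder helper):

     void use_buf (unsigned n)
     {
       char *buf = __builtin_alloca (n);  /* plain alloca, not VLA-backed  */
       process (buf, n);
     }

   If use_buf is called from a loop, inlining it would grow the caller's
   frame on every iteration, so inlining is refused unless use_buf is
   marked __attribute__((always_inline)).  */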
3619
3620 t = gimple_call_fndecl (stmt);
3621 if (t == NULL_TREE)
3622 break;
3623
3624 /* We cannot inline functions that call setjmp. */
3625 if (setjmp_call_p (t))
3626 {
3627 inline_forbidden_reason
3628 = G_("function %q+F can never be inlined because it uses setjmp");
3629 *handled_ops_p = true;
3630 return t;
3631 }
3632
3633 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3634 switch (DECL_FUNCTION_CODE (t))
3635 {
3636 /* We cannot inline functions that take a variable number of
3637 arguments. */
3638 case BUILT_IN_VA_START:
3639 case BUILT_IN_NEXT_ARG:
3640 case BUILT_IN_VA_END:
3641 inline_forbidden_reason
3642 = G_("function %q+F can never be inlined because it "
3643 "uses variable argument lists");
3644 *handled_ops_p = true;
3645 return t;
3646
3647 case BUILT_IN_LONGJMP:
3648 /* We can't inline functions that call __builtin_longjmp at
3649 all. The non-local goto machinery really requires the
3650 destination be in a different function. If we allow the
3651 function calling __builtin_longjmp to be inlined into the
3652 function calling __builtin_setjmp, Things will Go Awry. */
3653 inline_forbidden_reason
3654 = G_("function %q+F can never be inlined because "
3655 "it uses setjmp-longjmp exception handling");
3656 *handled_ops_p = true;
3657 return t;
3658
3659 case BUILT_IN_NONLOCAL_GOTO:
3660 /* Similarly. */
3661 inline_forbidden_reason
3662 = G_("function %q+F can never be inlined because "
3663 "it uses non-local goto");
3664 *handled_ops_p = true;
3665 return t;
3666
3667 case BUILT_IN_RETURN:
3668 case BUILT_IN_APPLY_ARGS:
3669 /* If a __builtin_apply_args caller were inlined,
3670 it would be saving arguments of the function it has
3671 been inlined into. Similarly, __builtin_return would
3672 return from the function into which it has been inlined. */
3673 inline_forbidden_reason
3674 = G_("function %q+F can never be inlined because "
3675 "it uses __builtin_return or __builtin_apply_args");
3676 *handled_ops_p = true;
3677 return t;
3678
3679 default:
3680 break;
3681 }
3682 break;
3683
3684 case GIMPLE_GOTO:
3685 t = gimple_goto_dest (stmt);
3686
3687 /* We will not inline a function which uses computed goto. The
3688 addresses of its local labels, which may be tucked into
3689 global storage, are of course not constant across
3690 instantiations, which causes unexpected behavior. */
3691 if (TREE_CODE (t) != LABEL_DECL)
3692 {
3693 inline_forbidden_reason
3694 = G_("function %q+F can never be inlined "
3695 "because it contains a computed goto");
3696 *handled_ops_p = true;
3697 return t;
3698 }
3699 break;
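/* A hypothetical fragment that is rejected by the check above:

     static void *tab[] = { &&l1, &&l2 };
     goto *tab[i];  /* computed goto  */

   The label addresses stored in `tab' keep pointing at the original body,
   so a copied body would jump back into the wrong function.  */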
3700
3701 default:
3702 break;
3703 }
3704
3705 *handled_ops_p = false;
3706 return NULL_TREE;
3707 }
3708
3709 /* Return true if FNDECL is a function that cannot be inlined into
3710 another one. */
3711
3712 static bool
3713 inline_forbidden_p (tree fndecl)
3714 {
3715 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3716 struct walk_stmt_info wi;
3717 basic_block bb;
3718 bool forbidden_p = false;
3719
3720 /* First check for shared reasons not to copy the code. */
3721 inline_forbidden_reason = copy_forbidden (fun);
3722 if (inline_forbidden_reason != NULL)
3723 return true;
3724
3725 /* Next, walk the statements of the function looking for
3726 constructs we can't handle, or that are non-optimal for inlining. */
3727 hash_set<tree> visited_nodes;
3728 memset (&wi, 0, sizeof (wi));
3729 wi.info = (void *) fndecl;
3730 wi.pset = &visited_nodes;
3731
3732 FOR_EACH_BB_FN (bb, fun)
3733 {
3734 gimple *ret;
3735 gimple_seq seq = bb_seq (bb);
3736 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3737 forbidden_p = (ret != NULL);
3738 if (forbidden_p)
3739 break;
3740 }
3741
3742 return forbidden_p;
3743 }
3744 \f
3745 /* Return false if the function FNDECL cannot be inlined on account of its
3746 attributes, true otherwise. */
3747 static bool
3748 function_attribute_inlinable_p (const_tree fndecl)
3749 {
3750 if (targetm.attribute_table)
3751 {
3752 const_tree a;
3753
3754 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3755 {
3756 const_tree name = TREE_PURPOSE (a);
3757 int i;
3758
3759 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3760 if (is_attribute_p (targetm.attribute_table[i].name, name))
3761 return targetm.function_attribute_inlinable_p (fndecl);
3762 }
3763 }
3764
3765 return true;
3766 }
3767
3768 /* Returns nonzero if FN is a function that does not have any
3769 fundamental inline blocking properties. */
3770
3771 bool
3772 tree_inlinable_function_p (tree fn)
3773 {
3774 bool inlinable = true;
3775 bool do_warning;
3776 tree always_inline;
3777
3778 /* If we've already decided this function shouldn't be inlined,
3779 there's no need to check again. */
3780 if (DECL_UNINLINABLE (fn))
3781 return false;
3782
3783 /* We only warn for functions declared `inline' by the user. */
3784 do_warning = (warn_inline
3785 && DECL_DECLARED_INLINE_P (fn)
3786 && !DECL_NO_INLINE_WARNING_P (fn)
3787 && !DECL_IN_SYSTEM_HEADER (fn));
3788
3789 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3790
3791 if (flag_no_inline
3792 && always_inline == NULL)
3793 {
3794 if (do_warning)
3795 warning (OPT_Winline, "function %q+F can never be inlined because it "
3796 "is suppressed using -fno-inline", fn);
3797 inlinable = false;
3798 }
3799
3800 else if (!function_attribute_inlinable_p (fn))
3801 {
3802 if (do_warning)
3803 warning (OPT_Winline, "function %q+F can never be inlined because it "
3804 "uses attributes conflicting with inlining", fn);
3805 inlinable = false;
3806 }
3807
3808 else if (inline_forbidden_p (fn))
3809 {
3810 /* See if we should warn about uninlinable functions. Previously,
3811 some of these warnings would be issued while trying to expand
3812 the function inline, but that would cause multiple warnings
3813 about functions that would for example call alloca. But since
3814 this is a property of the function, just one warning is enough.
3815 As a bonus we can now give more details about the reason why a
3816 function is not inlinable. */
3817 if (always_inline)
3818 error (inline_forbidden_reason, fn);
3819 else if (do_warning)
3820 warning (OPT_Winline, inline_forbidden_reason, fn);
3821
3822 inlinable = false;
3823 }
3824
3825 /* Squirrel away the result so that we don't have to check again. */
3826 DECL_UNINLINABLE (fn) = !inlinable;
3827
3828 return inlinable;
3829 }
3830
3831 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3832 word size, take a possible memcpy call into account, and return the
3833 cost based on whether we optimize for size or speed according to SPEED_P. */
3834
3835 int
3836 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3837 {
3838 HOST_WIDE_INT size;
3839
3840 gcc_assert (!VOID_TYPE_P (type));
3841
3842 if (TREE_CODE (type) == VECTOR_TYPE)
3843 {
3844 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3845 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3846 int orig_mode_size
3847 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3848 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3849 return ((orig_mode_size + simd_mode_size - 1)
3850 / simd_mode_size);
3851 }
3852
3853 size = int_size_in_bytes (type);
3854
3855 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3856 /* Cost of a memcpy call, 3 arguments and the call. */
3857 return 4;
3858 else
3859 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3860 }
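/* A worked example of the scalar path above, assuming a hypothetical target
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 6: a 24-byte struct
   costs (24 + 8 - 1) / 8 == 3 units, while anything larger than 8 * 6 == 48
   bytes (or of unknown size) is charged the flat memcpy cost of 4.  The
   actual constants are target dependent.  */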
3861
3862 /* Returns the cost of operation CODE, according to WEIGHTS. */
3863
3864 static int
3865 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3866 tree op1 ATTRIBUTE_UNUSED, tree op2)
3867 {
3868 switch (code)
3869 {
3870 /* These are "free" conversions, or their presumed cost
3871 is folded into other operations. */
3872 case RANGE_EXPR:
3873 CASE_CONVERT:
3874 case COMPLEX_EXPR:
3875 case PAREN_EXPR:
3876 case VIEW_CONVERT_EXPR:
3877 return 0;
3878
3879 /* Assign cost of 1 to usual operations.
3880 ??? We may consider mapping RTL costs to this. */
3881 case COND_EXPR:
3882 case VEC_COND_EXPR:
3883 case VEC_PERM_EXPR:
3884
3885 case PLUS_EXPR:
3886 case POINTER_PLUS_EXPR:
3887 case POINTER_DIFF_EXPR:
3888 case MINUS_EXPR:
3889 case MULT_EXPR:
3890 case MULT_HIGHPART_EXPR:
3891
3892 case ADDR_SPACE_CONVERT_EXPR:
3893 case FIXED_CONVERT_EXPR:
3894 case FIX_TRUNC_EXPR:
3895
3896 case NEGATE_EXPR:
3897 case FLOAT_EXPR:
3898 case MIN_EXPR:
3899 case MAX_EXPR:
3900 case ABS_EXPR:
3901 case ABSU_EXPR:
3902
3903 case LSHIFT_EXPR:
3904 case RSHIFT_EXPR:
3905 case LROTATE_EXPR:
3906 case RROTATE_EXPR:
3907
3908 case BIT_IOR_EXPR:
3909 case BIT_XOR_EXPR:
3910 case BIT_AND_EXPR:
3911 case BIT_NOT_EXPR:
3912
3913 case TRUTH_ANDIF_EXPR:
3914 case TRUTH_ORIF_EXPR:
3915 case TRUTH_AND_EXPR:
3916 case TRUTH_OR_EXPR:
3917 case TRUTH_XOR_EXPR:
3918 case TRUTH_NOT_EXPR:
3919
3920 case LT_EXPR:
3921 case LE_EXPR:
3922 case GT_EXPR:
3923 case GE_EXPR:
3924 case EQ_EXPR:
3925 case NE_EXPR:
3926 case ORDERED_EXPR:
3927 case UNORDERED_EXPR:
3928
3929 case UNLT_EXPR:
3930 case UNLE_EXPR:
3931 case UNGT_EXPR:
3932 case UNGE_EXPR:
3933 case UNEQ_EXPR:
3934 case LTGT_EXPR:
3935
3936 case CONJ_EXPR:
3937
3938 case PREDECREMENT_EXPR:
3939 case PREINCREMENT_EXPR:
3940 case POSTDECREMENT_EXPR:
3941 case POSTINCREMENT_EXPR:
3942
3943 case REALIGN_LOAD_EXPR:
3944
3945 case WIDEN_SUM_EXPR:
3946 case WIDEN_MULT_EXPR:
3947 case DOT_PROD_EXPR:
3948 case SAD_EXPR:
3949 case WIDEN_MULT_PLUS_EXPR:
3950 case WIDEN_MULT_MINUS_EXPR:
3951 case WIDEN_LSHIFT_EXPR:
3952
3953 case VEC_WIDEN_MULT_HI_EXPR:
3954 case VEC_WIDEN_MULT_LO_EXPR:
3955 case VEC_WIDEN_MULT_EVEN_EXPR:
3956 case VEC_WIDEN_MULT_ODD_EXPR:
3957 case VEC_UNPACK_HI_EXPR:
3958 case VEC_UNPACK_LO_EXPR:
3959 case VEC_UNPACK_FLOAT_HI_EXPR:
3960 case VEC_UNPACK_FLOAT_LO_EXPR:
3961 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3962 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3963 case VEC_PACK_TRUNC_EXPR:
3964 case VEC_PACK_SAT_EXPR:
3965 case VEC_PACK_FIX_TRUNC_EXPR:
3966 case VEC_PACK_FLOAT_EXPR:
3967 case VEC_WIDEN_LSHIFT_HI_EXPR:
3968 case VEC_WIDEN_LSHIFT_LO_EXPR:
3969 case VEC_DUPLICATE_EXPR:
3970 case VEC_SERIES_EXPR:
3971
3972 return 1;
3973
3974 /* A few special cases of expensive operations. This is useful
3975 to avoid inlining functions that have too many of these. */
3976 case TRUNC_DIV_EXPR:
3977 case CEIL_DIV_EXPR:
3978 case FLOOR_DIV_EXPR:
3979 case ROUND_DIV_EXPR:
3980 case EXACT_DIV_EXPR:
3981 case TRUNC_MOD_EXPR:
3982 case CEIL_MOD_EXPR:
3983 case FLOOR_MOD_EXPR:
3984 case ROUND_MOD_EXPR:
3985 case RDIV_EXPR:
3986 if (TREE_CODE (op2) != INTEGER_CST)
3987 return weights->div_mod_cost;
3988 return 1;
3989
3990 /* Bit-field insertion needs several shift and mask operations. */
3991 case BIT_INSERT_EXPR:
3992 return 3;
3993
3994 default:
3995 /* We expect a copy assignment with no operator. */
3996 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3997 return 0;
3998 }
3999 }
4000
4001
4002 /* Estimate number of instructions that will be created by expanding
4003 the statements in the statement sequence STMTS.
4004 WEIGHTS contains weights attributed to various constructs. */
4005
4006 int
4007 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4008 {
4009 int cost;
4010 gimple_stmt_iterator gsi;
4011
4012 cost = 0;
4013 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4014 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4015
4016 return cost;
4017 }
4018
4019
4020 /* Estimate number of instructions that will be created by expanding STMT.
4021 WEIGHTS contains weights attributed to various constructs. */
4022
4023 int
4024 estimate_num_insns (gimple *stmt, eni_weights *weights)
4025 {
4026 unsigned cost, i;
4027 enum gimple_code code = gimple_code (stmt);
4028 tree lhs;
4029 tree rhs;
4030
4031 switch (code)
4032 {
4033 case GIMPLE_ASSIGN:
4034 /* Try to estimate the cost of assignments. We have two cases to
4035 deal with:
4036 1) Simple assignments to registers;
4037 2) Stores to things that must live in memory. This includes
4038 "normal" stores to scalars, but also assignments of large
4039 structures, or constructors of big arrays;
4040
4041 Let us look at these two cases, assuming we have "a = b + C":
4042 <GIMPLE_ASSIGN <var_decl "a">
4043 <plus_expr <var_decl "b"> <constant C>>
4044 If "a" is a GIMPLE register, the assignment to it is free on almost
4045 any target, because "a" usually ends up in a real register. Hence
4046 the only cost of this expression comes from the PLUS_EXPR, and we
4047 can ignore the GIMPLE_ASSIGN.
4048 If "a" is not a GIMPLE register, the assignment to "a" will most
4049 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4050 of moving something into "a", which we compute using the function
4051 estimate_move_cost. */
4052 if (gimple_clobber_p (stmt))
4053 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4054
4055 lhs = gimple_assign_lhs (stmt);
4056 rhs = gimple_assign_rhs1 (stmt);
4057
4058 cost = 0;
4059
4060 /* Account for the cost of moving to / from memory. */
4061 if (gimple_store_p (stmt))
4062 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4063 if (gimple_assign_load_p (stmt))
4064 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4065
4066 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4067 gimple_assign_rhs1 (stmt),
4068 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4069 == GIMPLE_BINARY_RHS
4070 ? gimple_assign_rhs2 (stmt) : NULL);
4071 break;
4072
4073 case GIMPLE_COND:
4074 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4075 gimple_op (stmt, 0),
4076 gimple_op (stmt, 1));
4077 break;
4078
4079 case GIMPLE_SWITCH:
4080 {
4081 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4082 /* Take into account cost of the switch + guess 2 conditional jumps for
4083 each case label.
4084
4085 TODO: once the switch expansion logic is sufficiently separated, we can
4086 do a better job of estimating the cost of the switch. */
4087 if (weights->time_based)
4088 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4089 else
4090 cost = gimple_switch_num_labels (switch_stmt) * 2;
4091 }
4092 break;
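/* A worked example of the estimate above: a switch with 16 labels (the
   default included) is charged floor_log2 (16) * 2 == 8 when estimating
   time, assuming a roughly balanced decision tree, and 16 * 2 == 32 when
   estimating size.  */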
4093
4094 case GIMPLE_CALL:
4095 {
4096 tree decl;
4097
4098 if (gimple_call_internal_p (stmt))
4099 return 0;
4100 else if ((decl = gimple_call_fndecl (stmt))
4101 && fndecl_built_in_p (decl))
4102 {
4103 /* Do not special-case builtins where we see the body.
4104 This just confuses the inliner. */
4105 struct cgraph_node *node;
4106 if (!(node = cgraph_node::get (decl))
4107 || node->definition)
4108 ;
4109 /* For builtins that are likely expanded to nothing or
4110 inlined, do not account operand costs. */
4111 else if (is_simple_builtin (decl))
4112 return 0;
4113 else if (is_inexpensive_builtin (decl))
4114 return weights->target_builtin_call_cost;
4115 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4116 {
4117 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4118 specialize the cheap expansion we do here.
4119 ??? This asks for a more general solution. */
4120 switch (DECL_FUNCTION_CODE (decl))
4121 {
4122 case BUILT_IN_POW:
4123 case BUILT_IN_POWF:
4124 case BUILT_IN_POWL:
4125 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4126 && (real_equal
4127 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4128 &dconst2)))
4129 return estimate_operator_cost
4130 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4131 gimple_call_arg (stmt, 0));
4132 break;
4133
4134 default:
4135 break;
4136 }
4137 }
4138 }
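/* So, for illustration, a call such as `y = __builtin_pow (x, 2.0)' (the
   canonical form of `x * x' under -ffast-math, per the comment above) is
   charged as a single multiplication rather than as a full call.  */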
4139
4140 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4141 if (gimple_call_lhs (stmt))
4142 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4143 weights->time_based);
4144 for (i = 0; i < gimple_call_num_args (stmt); i++)
4145 {
4146 tree arg = gimple_call_arg (stmt, i);
4147 cost += estimate_move_cost (TREE_TYPE (arg),
4148 weights->time_based);
4149 }
4150 break;
4151 }
4152
4153 case GIMPLE_RETURN:
4154 return weights->return_cost;
4155
4156 case GIMPLE_GOTO:
4157 case GIMPLE_LABEL:
4158 case GIMPLE_NOP:
4159 case GIMPLE_PHI:
4160 case GIMPLE_PREDICT:
4161 case GIMPLE_DEBUG:
4162 return 0;
4163
4164 case GIMPLE_ASM:
4165 {
4166 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4167 /* 1000 means infinity. This avoids overflows later
4168 with very long asm statements. */
4169 if (count > 1000)
4170 count = 1000;
4171 /* If this asm is asm inline, count anything as minimum size. */
4172 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4173 count = MIN (1, count);
4174 return MAX (1, count);
4175 }
4176
4177 case GIMPLE_RESX:
4178 /* This is either going to be an external function call with one
4179 argument, or two register copy statements plus a goto. */
4180 return 2;
4181
4182 case GIMPLE_EH_DISPATCH:
4183 /* ??? This is going to turn into a switch statement. Ideally
4184 we'd have a look at the eh region and estimate the number of
4185 edges involved. */
4186 return 10;
4187
4188 case GIMPLE_BIND:
4189 return estimate_num_insns_seq (
4190 gimple_bind_body (as_a <gbind *> (stmt)),
4191 weights);
4192
4193 case GIMPLE_EH_FILTER:
4194 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4195
4196 case GIMPLE_CATCH:
4197 return estimate_num_insns_seq (gimple_catch_handler (
4198 as_a <gcatch *> (stmt)),
4199 weights);
4200
4201 case GIMPLE_TRY:
4202 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4203 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4204
4205 /* OMP directives are generally very expensive. */
4206
4207 case GIMPLE_OMP_RETURN:
4208 case GIMPLE_OMP_SECTIONS_SWITCH:
4209 case GIMPLE_OMP_ATOMIC_STORE:
4210 case GIMPLE_OMP_CONTINUE:
4211 /* ...except these, which are cheap. */
4212 return 0;
4213
4214 case GIMPLE_OMP_ATOMIC_LOAD:
4215 return weights->omp_cost;
4216
4217 case GIMPLE_OMP_FOR:
4218 return (weights->omp_cost
4219 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4220 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4221
4222 case GIMPLE_OMP_PARALLEL:
4223 case GIMPLE_OMP_TASK:
4224 case GIMPLE_OMP_CRITICAL:
4225 case GIMPLE_OMP_MASTER:
4226 case GIMPLE_OMP_TASKGROUP:
4227 case GIMPLE_OMP_ORDERED:
4228 case GIMPLE_OMP_SECTION:
4229 case GIMPLE_OMP_SECTIONS:
4230 case GIMPLE_OMP_SINGLE:
4231 case GIMPLE_OMP_TARGET:
4232 case GIMPLE_OMP_TEAMS:
4233 return (weights->omp_cost
4234 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4235
4236 case GIMPLE_TRANSACTION:
4237 return (weights->tm_cost
4238 + estimate_num_insns_seq (gimple_transaction_body (
4239 as_a <gtransaction *> (stmt)),
4240 weights));
4241
4242 default:
4243 gcc_unreachable ();
4244 }
4245
4246 return cost;
4247 }
4248
4249 /* Estimate number of instructions that will be created by expanding
4250 function FNDECL. WEIGHTS contains weights attributed to various
4251 constructs. */
4252
4253 int
4254 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4255 {
4256 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4257 gimple_stmt_iterator bsi;
4258 basic_block bb;
4259 int n = 0;
4260
4261 gcc_assert (my_function && my_function->cfg);
4262 FOR_EACH_BB_FN (bb, my_function)
4263 {
4264 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4265 n += estimate_num_insns (gsi_stmt (bsi), weights);
4266 }
4267
4268 return n;
4269 }
4270
4271
4272 /* Initializes weights used by estimate_num_insns. */
4273
4274 void
4275 init_inline_once (void)
4276 {
4277 eni_size_weights.call_cost = 1;
4278 eni_size_weights.indirect_call_cost = 3;
4279 eni_size_weights.target_builtin_call_cost = 1;
4280 eni_size_weights.div_mod_cost = 1;
4281 eni_size_weights.omp_cost = 40;
4282 eni_size_weights.tm_cost = 10;
4283 eni_size_weights.time_based = false;
4284 eni_size_weights.return_cost = 1;
4285
4286 /* Estimating time for call is difficult, since we have no idea what the
4287 called function does. In the current uses of eni_time_weights,
4288 underestimating the cost does less harm than overestimating it, so
4289 we choose a rather small value here. */
4290 eni_time_weights.call_cost = 10;
4291 eni_time_weights.indirect_call_cost = 15;
4292 eni_time_weights.target_builtin_call_cost = 1;
4293 eni_time_weights.div_mod_cost = 10;
4294 eni_time_weights.omp_cost = 40;
4295 eni_time_weights.tm_cost = 40;
4296 eni_time_weights.time_based = true;
4297 eni_time_weights.return_cost = 2;
4298 }
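/* A minimal usage sketch of the two weight sets initialized above (the
   variable names `size' and `time' are hypothetical):

     int size = estimate_num_insns (stmt, &eni_size_weights);
     int time = estimate_num_insns (stmt, &eni_time_weights);

   so the same statement can be costed once for size-oriented decisions and
   once for speed-oriented ones.  */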
4299
4300
4301 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4302
4303 static void
4304 prepend_lexical_block (tree current_block, tree new_block)
4305 {
4306 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4307 BLOCK_SUBBLOCKS (current_block) = new_block;
4308 BLOCK_SUPERCONTEXT (new_block) = current_block;
4309 }
4310
4311 /* Add local variables from CALLEE to CALLER. */
4312
4313 static inline void
4314 add_local_variables (struct function *callee, struct function *caller,
4315 copy_body_data *id)
4316 {
4317 tree var;
4318 unsigned ix;
4319
4320 FOR_EACH_LOCAL_DECL (callee, ix, var)
4321 if (!can_be_nonlocal (var, id))
4322 {
4323 tree new_var = remap_decl (var, id);
4324
4325 /* Remap debug-expressions. */
4326 if (VAR_P (new_var)
4327 && DECL_HAS_DEBUG_EXPR_P (var)
4328 && new_var != var)
4329 {
4330 tree tem = DECL_DEBUG_EXPR (var);
4331 bool old_regimplify = id->regimplify;
4332 id->remapping_type_depth++;
4333 walk_tree (&tem, copy_tree_body_r, id, NULL);
4334 id->remapping_type_depth--;
4335 id->regimplify = old_regimplify;
4336 SET_DECL_DEBUG_EXPR (new_var, tem);
4337 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4338 }
4339 add_local_decl (caller, new_var);
4340 }
4341 }
4342
4343 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4344 have brought in or introduced any debug stmts for SRCVAR. */
4345
4346 static inline void
4347 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4348 {
4349 tree *remappedvarp = id->decl_map->get (srcvar);
4350
4351 if (!remappedvarp)
4352 return;
4353
4354 if (!VAR_P (*remappedvarp))
4355 return;
4356
4357 if (*remappedvarp == id->retvar)
4358 return;
4359
4360 tree tvar = target_for_debug_bind (*remappedvarp);
4361 if (!tvar)
4362 return;
4363
4364 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4365 id->call_stmt);
4366 gimple_seq_add_stmt (bindings, stmt);
4367 }
4368
4369 /* For each inlined variable for which we may have debug bind stmts,
4370 add before GSI a final debug stmt resetting it, marking the end of
4371 its life, so that var-tracking knows it doesn't have to compute
4372 further locations for it. */
4373
4374 static inline void
4375 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4376 {
4377 tree var;
4378 unsigned ix;
4379 gimple_seq bindings = NULL;
4380
4381 if (!gimple_in_ssa_p (id->src_cfun))
4382 return;
4383
4384 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4385 return;
4386
4387 for (var = DECL_ARGUMENTS (id->src_fn);
4388 var; var = DECL_CHAIN (var))
4389 reset_debug_binding (id, var, &bindings);
4390
4391 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4392 reset_debug_binding (id, var, &bindings);
4393
4394 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4395 }
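/* In a -fvar-tracking-assignments dump the resets emitted above show up as
   hypothetical

     # DEBUG parm => NULL

   binds at the point where the inlined body ends, telling var-tracking not
   to compute any further locations for the inlined variables.  */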
4396
4397 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4398
4399 static bool
4400 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4401 {
4402 tree use_retvar;
4403 tree fn;
4404 hash_map<tree, tree> *dst;
4405 hash_map<tree, tree> *st = NULL;
4406 tree return_slot;
4407 tree modify_dest;
4408 struct cgraph_edge *cg_edge;
4409 cgraph_inline_failed_t reason;
4410 basic_block return_block;
4411 edge e;
4412 gimple_stmt_iterator gsi, stmt_gsi;
4413 bool successfully_inlined = false;
4414 bool purge_dead_abnormal_edges;
4415 gcall *call_stmt;
4416 unsigned int prop_mask, src_properties;
4417 struct function *dst_cfun;
4418 tree simduid;
4419 use_operand_p use;
4420 gimple *simtenter_stmt = NULL;
4421 vec<tree> *simtvars_save;
4422
4423 /* The gimplifier uses input_location in too many places, such as
4424 internal_get_tmp_var (). */
4425 location_t saved_location = input_location;
4426 input_location = gimple_location (stmt);
4427
4428 /* From here on, we're only interested in CALL_EXPRs. */
4429 call_stmt = dyn_cast <gcall *> (stmt);
4430 if (!call_stmt)
4431 goto egress;
4432
4433 cg_edge = id->dst_node->get_edge (stmt);
4434 gcc_checking_assert (cg_edge);
4435 /* First, see if we can figure out what function is being called.
4436 If we cannot, then there is no hope of inlining the function. */
4437 if (cg_edge->indirect_unknown_callee)
4438 goto egress;
4439 fn = cg_edge->callee->decl;
4440 gcc_checking_assert (fn);
4441
4442 /* If FN is a declaration of a function in a nested scope that was
4443 globally declared inline, we don't set its DECL_INITIAL.
4444 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4445 C++ front-end uses it for cdtors to refer to their internal
4446 declarations, which are not real functions. Fortunately those
4447 don't have trees to be saved, so we can tell by checking their
4448 gimple_body. */
4449 if (!DECL_INITIAL (fn)
4450 && DECL_ABSTRACT_ORIGIN (fn)
4451 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4452 fn = DECL_ABSTRACT_ORIGIN (fn);
4453
4454 /* Don't try to inline functions that are not well-suited to inlining. */
4455 if (cg_edge->inline_failed)
4456 {
4457 reason = cg_edge->inline_failed;
4458 /* If this call was originally indirect, we do not want to emit any
4459 inlining related warnings or sorry messages because there are no
4460 guarantees regarding those. */
4461 if (cg_edge->indirect_inlining_edge)
4462 goto egress;
4463
4464 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4465 /* For extern inline functions that get redefined we have always
4466 silently ignored the always_inline flag. Better behavior would
4467 be to be able to keep both bodies and use the extern inline body
4468 for inlining, but we can't do that because front ends overwrite
4469 the body. */
4470 && !cg_edge->callee->local.redefined_extern_inline
4471 /* During early inline pass, report only when optimization is
4472 not turned on. */
4473 && (symtab->global_info_ready
4474 || !optimize
4475 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4476 /* PR 20090218-1_0.c. Body can be provided by another module. */
4477 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4478 {
4479 error ("inlining failed in call to always_inline %q+F: %s", fn,
4480 cgraph_inline_failed_string (reason));
4481 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4482 inform (gimple_location (stmt), "called from here");
4483 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4484 inform (DECL_SOURCE_LOCATION (cfun->decl),
4485 "called from this function");
4486 }
4487 else if (warn_inline
4488 && DECL_DECLARED_INLINE_P (fn)
4489 && !DECL_NO_INLINE_WARNING_P (fn)
4490 && !DECL_IN_SYSTEM_HEADER (fn)
4491 && reason != CIF_UNSPECIFIED
4492 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4493 /* Do not warn about not inlined recursive calls. */
4494 && !cg_edge->recursive_p ()
4495 /* Avoid warnings during early inline pass. */
4496 && symtab->global_info_ready)
4497 {
4498 auto_diagnostic_group d;
4499 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4500 fn, _(cgraph_inline_failed_string (reason))))
4501 {
4502 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4503 inform (gimple_location (stmt), "called from here");
4504 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4505 inform (DECL_SOURCE_LOCATION (cfun->decl),
4506 "called from this function");
4507 }
4508 }
4509 goto egress;
4510 }
4511 id->src_node = cg_edge->callee;
4512
4513 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4514 and redirect to the function being thunked. */
4515 if (id->src_node->thunk.thunk_p)
4516 {
4517 cgraph_edge *edge;
4518 tree virtual_offset = NULL;
4519 profile_count count = cg_edge->count;
4520 tree op;
4521 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4522
4523 cg_edge->remove ();
4524 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4525 gimple_uid (stmt),
4526 profile_count::one (),
4527 profile_count::one (),
4528 true);
4529 edge->count = count;
4530 if (id->src_node->thunk.virtual_offset_p)
4531 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4532 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4533 NULL);
4534 gsi_insert_before (&iter, gimple_build_assign (op,
4535 gimple_call_arg (stmt, 0)),
4536 GSI_NEW_STMT);
4537 gcc_assert (id->src_node->thunk.this_adjusting);
4538 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4539 virtual_offset, id->src_node->thunk.indirect_offset);
4540
4541 gimple_call_set_arg (stmt, 0, op);
4542 gimple_call_set_fndecl (stmt, edge->callee->decl);
4543 update_stmt (stmt);
4544 id->src_node->remove ();
4545 expand_call_inline (bb, stmt, id);
4546 maybe_remove_unused_call_args (cfun, stmt);
4547 return true;
4548 }
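/* For illustration: with C++ multiple inheritance a thunk typically just
   adds a fixed (and possibly vtable-loaded) offset to `this'.  For a
   hypothetical call

     obj->f (args);  /* resolves to such a thunk  */

   the code above rewrites the first argument through thunk_adjust, points
   the call at the real body, and then retries inlining on the redirected
   call.  */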
4549 fn = cg_edge->callee->decl;
4550 cg_edge->callee->get_untransformed_body ();
4551
4552 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4553 cg_edge->callee->verify ();
4554
4555 /* We will be inlining this callee. */
4556 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4557 id->assign_stmts.create (0);
4558
4559 /* Update the caller's EH personality. */
4560 if (DECL_FUNCTION_PERSONALITY (fn))
4561 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4562 = DECL_FUNCTION_PERSONALITY (fn);
4563
4564 /* Split the block before the GIMPLE_CALL. */
4565 stmt_gsi = gsi_for_stmt (stmt);
4566 gsi_prev (&stmt_gsi);
4567 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4568 bb = e->src;
4569 return_block = e->dest;
4570 remove_edge (e);
4571
4572 /* If the GIMPLE_CALL was the last statement of BB, it may have
4573 been the source of abnormal edges. In this case, schedule
4574 the removal of dead abnormal edges. */
4575 gsi = gsi_start_bb (return_block);
4576 gsi_next (&gsi);
4577 purge_dead_abnormal_edges = gsi_end_p (gsi);
4578
4579 stmt_gsi = gsi_start_bb (return_block);
4580
4581 /* Build a block containing code to initialize the arguments, the
4582 actual inline expansion of the body, and a label for the return
4583 statements within the function to jump to. The type of the
4584 statement expression is the return type of the function call.
4585 ??? If the call does not have an associated block then we will
4586 remap all callee blocks to NULL, effectively dropping most of
4587 its debug information. This should only happen for calls to
4588 artificial decls inserted by the compiler itself. We need to
4589 either link the inlined blocks into the caller block tree or
4590 not refer to them in any way to not break GC for locations. */
4591 if (gimple_block (stmt))
4592 {
4593 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4594 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4595 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4596 if (loc == UNKNOWN_LOCATION)
4597 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4598 if (loc == UNKNOWN_LOCATION)
4599 loc = BUILTINS_LOCATION;
4600 id->block = make_node (BLOCK);
4601 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4602 BLOCK_SOURCE_LOCATION (id->block) = loc;
4603 prepend_lexical_block (gimple_block (stmt), id->block);
4604 }
4605
4606 /* Local declarations will be replaced by their equivalents in this map. */
4607 st = id->decl_map;
4608 id->decl_map = new hash_map<tree, tree>;
4609 dst = id->debug_map;
4610 id->debug_map = NULL;
4611 if (flag_stack_reuse != SR_NONE)
4612 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4613
4614 /* Record the function we are about to inline. */
4615 id->src_fn = fn;
4616 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4617 id->reset_location = DECL_IGNORED_P (fn);
4618 id->call_stmt = call_stmt;
4619
4620 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4621 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4622 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4623 simtvars_save = id->dst_simt_vars;
4624 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4625 && (simduid = bb->loop_father->simduid) != NULL_TREE
4626 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4627 && single_imm_use (simduid, &use, &simtenter_stmt)
4628 && is_gimple_call (simtenter_stmt)
4629 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4630 vec_alloc (id->dst_simt_vars, 0);
4631 else
4632 id->dst_simt_vars = NULL;
4633
4634 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4635 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4636
4637 /* If the src function contains an IFN_VA_ARG, then so will the dst
4638 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4639 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4640 src_properties = id->src_cfun->curr_properties & prop_mask;
4641 if (src_properties != prop_mask)
4642 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4643
4644 gcc_assert (!id->src_cfun->after_inlining);
4645
4646 id->entry_bb = bb;
4647 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4648 {
4649 gimple_stmt_iterator si = gsi_last_bb (bb);
4650 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4651 NOT_TAKEN),
4652 GSI_NEW_STMT);
4653 }
4654 initialize_inlined_parameters (id, stmt, fn, bb);
4655 if (debug_nonbind_markers_p && debug_inline_points && id->block
4656 && inlined_function_outer_scope_p (id->block))
4657 {
4658 gimple_stmt_iterator si = gsi_last_bb (bb);
4659 gsi_insert_after (&si, gimple_build_debug_inline_entry
4660 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4661 GSI_NEW_STMT);
4662 }
4663
4664 if (DECL_INITIAL (fn))
4665 {
4666 if (gimple_block (stmt))
4667 {
4668 tree *var;
4669
4670 prepend_lexical_block (id->block,
4671 remap_blocks (DECL_INITIAL (fn), id));
4672 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4673 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4674 == NULL_TREE));
4675 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4676 otherwise, for DWARF, the DW_TAG_formal_parameter entries will not be
4677 children of DW_TAG_inlined_subroutine but of a DW_TAG_lexical_block
4678 under it. The parameters can then be evaluated in the debugger,
4679 but don't show up in backtraces. */
4680 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4681 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4682 {
4683 tree v = *var;
4684 *var = TREE_CHAIN (v);
4685 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4686 BLOCK_VARS (id->block) = v;
4687 }
4688 else
4689 var = &TREE_CHAIN (*var);
4690 }
4691 else
4692 remap_blocks_to_null (DECL_INITIAL (fn), id);
4693 }
4694
4695 /* Return statements in the function body will be replaced by jumps
4696 to the RET_LABEL. */
4697 gcc_assert (DECL_INITIAL (fn));
4698 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4699
4700 /* Find the LHS to which the result of this call is assigned. */
4701 return_slot = NULL;
4702 if (gimple_call_lhs (stmt))
4703 {
4704 modify_dest = gimple_call_lhs (stmt);
4705
4706 /* The function which we are inlining might not return a value,
4707 in which case we should issue a warning that the function
4708 does not return a value. In that case the optimizers will
4709 see that the variable to which the value is assigned was not
4710 initialized. We do not want to issue a warning about that
4711 uninitialized variable. */
4712 if (DECL_P (modify_dest))
4713 TREE_NO_WARNING (modify_dest) = 1;
4714
4715 if (gimple_call_return_slot_opt_p (call_stmt))
4716 {
4717 return_slot = modify_dest;
4718 modify_dest = NULL;
4719 }
4720 }
4721 else
4722 modify_dest = NULL;
4723
4724 /* If we are inlining a call to the C++ operator new, we don't want
4725 to use type based alias analysis on the return value. Otherwise
4726 we may get confused if the compiler sees that the inlined new
4727 function returns a pointer which was just deleted. See bug
4728 33407. */
4729 if (DECL_IS_OPERATOR_NEW (fn))
4730 {
4731 return_slot = NULL;
4732 modify_dest = NULL;
4733 }
4734
4735 /* Declare the return variable for the function. */
4736 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4737
4738 /* Add local vars in this inlined callee to caller. */
4739 add_local_variables (id->src_cfun, cfun, id);
4740
4741 if (dump_enabled_p ())
4742 {
4743 char buf[128];
4744 snprintf (buf, sizeof(buf), "%4.2f",
4745 cg_edge->sreal_frequency ().to_double ());
4746 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4747 call_stmt,
4748 "Inlining %C to %C with frequency %s\n",
4749 id->src_node, id->dst_node, buf);
4750 if (dump_file && (dump_flags & TDF_DETAILS))
4751 {
4752 id->src_node->dump (dump_file);
4753 id->dst_node->dump (dump_file);
4754 }
4755 }
4756
4757 /* This is it. Duplicate the callee body. Assume callee is
4758 pre-gimplified. Note that we must not alter the caller
4759 function in any way before this point, as this CALL_EXPR may be
4760 a self-referential call; if we're calling ourselves, we need to
4761 duplicate our body before altering anything. */
4762 copy_body (id, bb, return_block, NULL);
4763
4764 reset_debug_bindings (id, stmt_gsi);
4765
4766 if (flag_stack_reuse != SR_NONE)
4767 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4768 if (!TREE_THIS_VOLATILE (p))
4769 {
4770 tree *varp = id->decl_map->get (p);
4771 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4772 {
4773 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4774 gimple *clobber_stmt;
4775 TREE_THIS_VOLATILE (clobber) = 1;
4776 clobber_stmt = gimple_build_assign (*varp, clobber);
4777 gimple_set_location (clobber_stmt, gimple_location (stmt));
4778 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4779 }
4780 }
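/* Each clobber inserted above appears in the GIMPLE dump as, e.g.,

     parm ={v} {CLOBBER};

   (the name `parm' is illustrative); it marks the end of life of the
   inlined parameter's stack slot so the slot can be reused.  */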
4781
4782 /* Reset the escaped solution. */
4783 if (cfun->gimple_df)
4784 pt_solution_reset (&cfun->gimple_df->escaped);
4785
4786 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4787 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4788 {
4789 size_t nargs = gimple_call_num_args (simtenter_stmt);
4790 vec<tree> *vars = id->dst_simt_vars;
4791 auto_vec<tree> newargs (nargs + vars->length ());
4792 for (size_t i = 0; i < nargs; i++)
4793 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4794 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4795 {
4796 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4797 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4798 }
4799 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4800 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4801 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4802 gsi_replace (&gsi, g, false);
4803 }
4804 vec_free (id->dst_simt_vars);
4805 id->dst_simt_vars = simtvars_save;
4806
4807 /* Clean up. */
4808 if (id->debug_map)
4809 {
4810 delete id->debug_map;
4811 id->debug_map = dst;
4812 }
4813 delete id->decl_map;
4814 id->decl_map = st;
4815
4816 /* Unlink the call's virtual operands before replacing it. */
4817 unlink_stmt_vdef (stmt);
4818 if (gimple_vdef (stmt)
4819 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4820 release_ssa_name (gimple_vdef (stmt));
4821
4822 /* If the inlined function returns a result that we care about,
4823 substitute the GIMPLE_CALL with an assignment of the return
4824 variable to the LHS of the call. That is, if STMT was
4825 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4826 if (use_retvar && gimple_call_lhs (stmt))
4827 {
4828 gimple *old_stmt = stmt;
4829 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4830 gimple_set_location (stmt, gimple_location (old_stmt));
4831 gsi_replace (&stmt_gsi, stmt, false);
4832 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4833 /* Append a clobber for id->retvar if easily possible. */
4834 if (flag_stack_reuse != SR_NONE
4835 && id->retvar
4836 && VAR_P (id->retvar)
4837 && id->retvar != return_slot
4838 && id->retvar != modify_dest
4839 && !TREE_THIS_VOLATILE (id->retvar)
4840 && !is_gimple_reg (id->retvar)
4841 && !stmt_ends_bb_p (stmt))
4842 {
4843 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4844 gimple *clobber_stmt;
4845 TREE_THIS_VOLATILE (clobber) = 1;
4846 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4847 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4848 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4849 }
4850 }
4851 else
4852 {
4853 /* Handle the case of inlining a function with no return
4854 statement, which causes the return value to become undefined. */
4855 if (gimple_call_lhs (stmt)
4856 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4857 {
4858 tree name = gimple_call_lhs (stmt);
4859 tree var = SSA_NAME_VAR (name);
4860 tree def = var ? ssa_default_def (cfun, var) : NULL;
4861
4862 if (def)
4863 {
4864 /* If the variable is used undefined, make this name
4865 undefined via a move. */
4866 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4867 gsi_replace (&stmt_gsi, stmt, true);
4868 }
4869 else
4870 {
4871 if (!var)
4872 {
4873 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4874 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4875 }
4876 /* Otherwise make this variable undefined. */
4877 gsi_remove (&stmt_gsi, true);
4878 set_ssa_default_def (cfun, var, name);
4879 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4880 }
4881 }
4882 /* Replace with a clobber for id->retvar. */
4883 else if (flag_stack_reuse != SR_NONE
4884 && id->retvar
4885 && VAR_P (id->retvar)
4886 && id->retvar != return_slot
4887 && id->retvar != modify_dest
4888 && !TREE_THIS_VOLATILE (id->retvar)
4889 && !is_gimple_reg (id->retvar))
4890 {
4891 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4892 gimple *clobber_stmt;
4893 TREE_THIS_VOLATILE (clobber) = 1;
4894 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4895 gimple_set_location (clobber_stmt, gimple_location (stmt));
4896 gsi_replace (&stmt_gsi, clobber_stmt, false);
4897 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4898 }
4899 else
4900 gsi_remove (&stmt_gsi, true);
4901 }
4902
4903 if (purge_dead_abnormal_edges)
4904 {
4905 gimple_purge_dead_eh_edges (return_block);
4906 gimple_purge_dead_abnormal_call_edges (return_block);
4907 }
4908
4909 /* If the value of the new expression is ignored, that's OK. We
4910 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4911 the equivalent inlined version either. */
4912 if (is_gimple_assign (stmt))
4913 {
4914 gcc_assert (gimple_assign_single_p (stmt)
4915 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4916 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4917 }
4918
4919 id->assign_stmts.release ();
4920 id->add_clobbers_to_eh_landing_pads = 0;
4921
4922 /* Output the inlining info for this abstract function, since it has been
4923 inlined. If we don't do this now, we can lose the information about the
4924 variables in the function when the blocks get blown away as soon as we
4925 remove the cgraph node. */
4926 if (gimple_block (stmt))
4927 (*debug_hooks->outlining_inline_function) (fn);
4928
4929 /* Update callgraph if needed. */
4930 cg_edge->callee->remove ();
4931
4932 id->block = NULL_TREE;
4933 id->retvar = NULL_TREE;
4934 successfully_inlined = true;
4935
4936 egress:
4937 input_location = saved_location;
4938 return successfully_inlined;
4939 }
4940
4941 /* Expand call statements found in basic block BB.
4942 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4943 in a MODIFY_EXPR. */
4944
4945 static bool
4946 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4947 {
4948 gimple_stmt_iterator gsi;
4949 bool inlined = false;
4950
4951 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4952 {
4953 gimple *stmt = gsi_stmt (gsi);
4954 gsi_prev (&gsi);
4955
4956 if (is_gimple_call (stmt)
4957 && !gimple_call_internal_p (stmt))
4958 inlined |= expand_call_inline (bb, stmt, id);
4959 }
4960
4961 return inlined;
4962 }
4963
4964
4965 /* Walk all basic blocks created after FIRST and try to fold every statement
4966 in the STATEMENTS pointer set. */
4967
4968 static void
4969 fold_marked_statements (int first, hash_set<gimple *> *statements)
4970 {
4971 for (; first < last_basic_block_for_fn (cfun); first++)
4972 if (BASIC_BLOCK_FOR_FN (cfun, first))
4973 {
4974 gimple_stmt_iterator gsi;
4975
4976 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4977 !gsi_end_p (gsi);
4978 gsi_next (&gsi))
4979 if (statements->contains (gsi_stmt (gsi)))
4980 {
4981 gimple *old_stmt = gsi_stmt (gsi);
4982 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4983
4984 if (old_decl && fndecl_built_in_p (old_decl))
4985 {
4986 /* Folding builtins can create multiple instructions;
4987 we need to look at all of them. */
4988 gimple_stmt_iterator i2 = gsi;
4989 gsi_prev (&i2);
4990 if (fold_stmt (&gsi))
4991 {
4992 gimple *new_stmt;
4993 /* If a builtin at the end of a bb folded into nothing,
4994 the following loop won't work. */
4995 if (gsi_end_p (gsi))
4996 {
4997 cgraph_update_edges_for_call_stmt (old_stmt,
4998 old_decl, NULL);
4999 break;
5000 }
5001 if (gsi_end_p (i2))
5002 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5003 else
5004 gsi_next (&i2);
5005 while (1)
5006 {
5007 new_stmt = gsi_stmt (i2);
5008 update_stmt (new_stmt);
5009 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5010 new_stmt);
5011
5012 if (new_stmt == gsi_stmt (gsi))
5013 {
5014 /* It is okay to check only for the very last
5015 of these statements. If it is a throwing
5016 statement nothing will change. If it isn't,
5017 this can remove EH edges. The only way that
5018 could be wrong is if some intermediate stmts
5019 throw but the last one doesn't; that would mean
5020 we'd have to split the block, which we can't
5021 do here and we'd lose anyway. And as builtins
5022 probably never throw, this is all
5023 moot anyway. */
5024 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5025 new_stmt))
5026 gimple_purge_dead_eh_edges (
5027 BASIC_BLOCK_FOR_FN (cfun, first));
5028 break;
5029 }
5030 gsi_next (&i2);
5031 }
5032 }
5033 }
5034 else if (fold_stmt (&gsi))
5035 {
5036 /* Re-read the statement from GSI as fold_stmt() may
5037 have changed it. */
5038 gimple *new_stmt = gsi_stmt (gsi);
5039 update_stmt (new_stmt);
5040
5041 if (is_gimple_call (old_stmt)
5042 || is_gimple_call (new_stmt))
5043 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5044 new_stmt);
5045
5046 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5047 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5048 first));
5049 }
5050 }
5051 }
5052 }
5053
5054 /* Expand calls to inline functions in the body of FN. */
5055
5056 unsigned int
5057 optimize_inline_calls (tree fn)
5058 {
5059 copy_body_data id;
5060 basic_block bb;
5061 int last = n_basic_blocks_for_fn (cfun);
5062 bool inlined_p = false;
5063
5064 /* Clear out ID. */
5065 memset (&id, 0, sizeof (id));
5066
5067 id.src_node = id.dst_node = cgraph_node::get (fn);
5068 gcc_assert (id.dst_node->definition);
5069 id.dst_fn = fn;
5070 /* Or any functions that aren't finished yet. */
5071 if (current_function_decl)
5072 id.dst_fn = current_function_decl;
5073
5074 id.copy_decl = copy_decl_maybe_to_var;
5075 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5076 id.transform_new_cfg = false;
5077 id.transform_return_to_modify = true;
5078 id.transform_parameter = true;
5079 id.transform_lang_insert_block = NULL;
5080 id.statements_to_fold = new hash_set<gimple *>;
5081
5082 push_gimplify_context ();
5083
5084 /* We make no attempts to keep dominance info up-to-date. */
5085 free_dominance_info (CDI_DOMINATORS);
5086 free_dominance_info (CDI_POST_DOMINATORS);
5087
5088 /* Register specific gimple functions. */
5089 gimple_register_cfg_hooks ();
5090
5091 /* Reach the trees by walking over the CFG, and note the
5092 enclosing basic-blocks in the call edges. */
5093 /* We walk the blocks going forward, because inlined function bodies
5094 will split id->current_basic_block, and the new blocks will
5095 follow it; we'll trudge through them, processing their CALL_EXPRs
5096 along the way. */
5097 FOR_EACH_BB_FN (bb, cfun)
5098 inlined_p |= gimple_expand_calls_inline (bb, &id);
5099
5100 pop_gimplify_context (NULL);
5101
5102 if (flag_checking)
5103 {
5104 struct cgraph_edge *e;
5105
5106 id.dst_node->verify ();
5107
5108 /* Double check that we inlined everything we are supposed to inline. */
5109 for (e = id.dst_node->callees; e; e = e->next_callee)
5110 gcc_assert (e->inline_failed);
5111 }
5112
5113 /* Fold queued statements. */
5114 update_max_bb_count ();
5115 fold_marked_statements (last, id.statements_to_fold);
5116 delete id.statements_to_fold;
5117
5118 gcc_assert (!id.debug_stmts.exists ());
5119
5120 /* If we didn't inline into the function there is nothing to do. */
5121 if (!inlined_p)
5122 return 0;
5123
5124 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5125 number_blocks (fn);
5126
5127 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5128
5129 if (flag_checking)
5130 id.dst_node->verify ();
5131
5132 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5133 not possible yet - the IPA passes might make various functions not
5134 throw and they don't care to proactively update local EH info. This is
5135 done later in the fixup_cfg pass, which also executes the verification. */
5136 return (TODO_update_ssa
5137 | TODO_cleanup_cfg
5138 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5139 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5140 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5141 ? TODO_rebuild_frequencies : 0));
5142 }
5143
5144 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5145
5146 tree
5147 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5148 {
5149 enum tree_code code = TREE_CODE (*tp);
5150 enum tree_code_class cl = TREE_CODE_CLASS (code);
5151
5152 /* We make copies of most nodes. */
5153 if (IS_EXPR_CODE_CLASS (cl)
5154 || code == TREE_LIST
5155 || code == TREE_VEC
5156 || code == TYPE_DECL
5157 || code == OMP_CLAUSE)
5158 {
5159 /* Because the chain gets clobbered when we make a copy, we save it
5160 here. */
5161 tree chain = NULL_TREE, new_tree;
5162
5163 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5164 chain = TREE_CHAIN (*tp);
5165
5166 /* Copy the node. */
5167 new_tree = copy_node (*tp);
5168
5169 *tp = new_tree;
5170
5171 /* Now, restore the chain, if appropriate. That will cause
5172 walk_tree to walk into the chain as well. */
5173 if (code == PARM_DECL
5174 || code == TREE_LIST
5175 || code == OMP_CLAUSE)
5176 TREE_CHAIN (*tp) = chain;
5177
5178 /* For now, we don't update BLOCKs when we make copies. So, we
5179 have to nullify all BIND_EXPRs. */
5180 if (TREE_CODE (*tp) == BIND_EXPR)
5181 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5182 }
5183 else if (code == CONSTRUCTOR)
5184 {
5185 /* CONSTRUCTOR nodes need special handling because
5186 we need to duplicate the vector of elements. */
5187 tree new_tree;
5188
5189 new_tree = copy_node (*tp);
5190 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5191 *tp = new_tree;
5192 }
5193 else if (code == STATEMENT_LIST)
5194 /* We used to just abort on STATEMENT_LIST, but we can run into them
5195 with statement-expressions (c++/40975). */
5196 copy_statement_list (tp);
5197 else if (TREE_CODE_CLASS (code) == tcc_type)
5198 *walk_subtrees = 0;
5199 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5200 *walk_subtrees = 0;
5201 else if (TREE_CODE_CLASS (code) == tcc_constant)
5202 *walk_subtrees = 0;
5203 return NULL_TREE;
5204 }
5205
5206 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5207 information indicating to what new SAVE_EXPR this one should be mapped,
5208 use that one. Otherwise, create a new node, enter it in ST and use it,
5209 so that later encounters map to the same copy. */
5210
5211 static void
5212 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5213 {
5214 tree *n;
5215 tree t;
5216
5217 /* See if we already encountered this SAVE_EXPR. */
5218 n = st->get (*tp);
5219
5220 /* If we didn't already remap this SAVE_EXPR, do so now. */
5221 if (!n)
5222 {
5223 t = copy_node (*tp);
5224
5225 /* Remember this SAVE_EXPR. */
5226 st->put (*tp, t);
5227 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5228 st->put (t, t);
5229 }
5230 else
5231 {
5232 /* We've already walked into this SAVE_EXPR; don't do it again. */
5233 *walk_subtrees = 0;
5234 t = *n;
5235 }
5236
5237 /* Replace this SAVE_EXPR with the copy. */
5238 *tp = t;
5239 }
5240
5241 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5242 label, copies the declaration and enters it in the decl map of the
5243 'copy_body_data' pointed to by WI->info. */
5244
5245 static tree
5246 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5247 bool *handled_ops_p ATTRIBUTE_UNUSED,
5248 struct walk_stmt_info *wi)
5249 {
5250 copy_body_data *id = (copy_body_data *) wi->info;
5251 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5252
5253 if (stmt)
5254 {
5255 tree decl = gimple_label_label (stmt);
5256
5257 /* Copy the decl and remember the copy. */
5258 insert_decl_map (id, decl, id->copy_decl (decl, id));
5259 }
5260
5261 return NULL_TREE;
5262 }
5263
5264 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5265 struct walk_stmt_info *wi);
5266
5267 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5268 Using the decl map of the 'copy_body_data' found in WI->info, remaps
5269 all local declarations to appropriate replacements in gimple
5270 operands. */
5271
5272 static tree
5273 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5274 {
5275 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5276 copy_body_data *id = (copy_body_data *) wi->info;
5277 hash_map<tree, tree> *st = id->decl_map;
5278 tree *n;
5279 tree expr = *tp;
5280
5281 /* For recursive invocations this is no longer the LHS itself. */
5282 bool is_lhs = wi->is_lhs;
5283 wi->is_lhs = false;
5284
5285 if (TREE_CODE (expr) == SSA_NAME)
5286 {
5287 *tp = remap_ssa_name (*tp, id);
5288 *walk_subtrees = 0;
5289 if (is_lhs)
5290 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5291 }
5292 /* Only a local declaration (variable or label). */
5293 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5294 || TREE_CODE (expr) == LABEL_DECL)
5295 {
5296 /* Lookup the declaration. */
5297 n = st->get (expr);
5298
5299 /* If it's there, remap it. */
5300 if (n)
5301 *tp = *n;
5302 *walk_subtrees = 0;
5303 }
5304 else if (TREE_CODE (expr) == STATEMENT_LIST
5305 || TREE_CODE (expr) == BIND_EXPR
5306 || TREE_CODE (expr) == SAVE_EXPR)
5307 gcc_unreachable ();
5308 else if (TREE_CODE (expr) == TARGET_EXPR)
5309 {
5310 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5311 It's OK for this to happen if it was part of a subtree that
5312 isn't immediately expanded, such as operand 2 of another
5313 TARGET_EXPR. */
5314 if (!TREE_OPERAND (expr, 1))
5315 {
5316 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5317 TREE_OPERAND (expr, 3) = NULL_TREE;
5318 }
5319 }
5320 else if (TREE_CODE (expr) == OMP_CLAUSE)
5321 {
5322 /* Before the omplower pass completes, some OMP clauses can contain
5323 sequences that are neither copied by gimple_seq_copy nor walked by
5324 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5325 in those situations, we have to copy and process them explicitly. */
5326
5327 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5328 {
5329 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5330 seq = duplicate_remap_omp_clause_seq (seq, wi);
5331 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5332 }
5333 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5334 {
5335 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5336 seq = duplicate_remap_omp_clause_seq (seq, wi);
5337 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5338 }
5339 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5340 {
5341 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5342 seq = duplicate_remap_omp_clause_seq (seq, wi);
5343 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5344 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5345 seq = duplicate_remap_omp_clause_seq (seq, wi);
5346 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5347 }
5348 }
5349
5350 /* Keep iterating. */
5351 return NULL_TREE;
5352 }
5353
5354
5355 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5356 Using the decl map of the 'copy_body_data' found in WI->info, remaps
5357 all local declarations to appropriate replacements in gimple
5358 statements. */
5359
5360 static tree
5361 replace_locals_stmt (gimple_stmt_iterator *gsip,
5362 bool *handled_ops_p ATTRIBUTE_UNUSED,
5363 struct walk_stmt_info *wi)
5364 {
5365 copy_body_data *id = (copy_body_data *) wi->info;
5366 gimple *gs = gsi_stmt (*gsip);
5367
5368 if (gbind *stmt = dyn_cast <gbind *> (gs))
5369 {
5370 tree block = gimple_bind_block (stmt);
5371
5372 if (block)
5373 {
5374 remap_block (&block, id);
5375 gimple_bind_set_block (stmt, block);
5376 }
5377
5378 /* This will remap a lot of the same decls again, but this should be
5379 harmless. */
5380 if (gimple_bind_vars (stmt))
5381 {
5382 tree old_var, decls = gimple_bind_vars (stmt);
5383
5384 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5385 if (!can_be_nonlocal (old_var, id)
5386 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5387 remap_decl (old_var, id);
5388
5389 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5390 id->prevent_decl_creation_for_types = true;
5391 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5392 id->prevent_decl_creation_for_types = false;
5393 }
5394 }
5395
5396 /* Keep iterating. */
5397 return NULL_TREE;
5398 }
5399
5400 /* Create a copy of SEQ and remap all decls in it. */
5401
5402 static gimple_seq
5403 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5404 {
5405 if (!seq)
5406 return NULL;
5407
5408 /* Labels in OMP clause sequences can only be referred to from within the
5409 sequence itself, so both marking local labels and remapping can be done here. */
5410 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5411 gimple_seq copy = gimple_seq_copy (seq);
5412 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5413 return copy;
5414 }
5415
5416 /* Copies everything in SEQ and replaces variables and labels local to
5417 current_function_decl. */
5418
5419 gimple_seq
5420 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5421 {
5422 copy_body_data id;
5423 struct walk_stmt_info wi;
5424 gimple_seq copy;
5425
5426 /* There's nothing to do for an empty sequence. */
5427 if (seq == NULL)
5428 return seq;
5429
5430 /* Set up ID. */
5431 memset (&id, 0, sizeof (id));
5432 id.src_fn = current_function_decl;
5433 id.dst_fn = current_function_decl;
5434 id.src_cfun = cfun;
5435 id.decl_map = new hash_map<tree, tree>;
5436 id.debug_map = NULL;
5437
5438 id.copy_decl = copy_decl_no_change;
5439 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5440 id.transform_new_cfg = false;
5441 id.transform_return_to_modify = false;
5442 id.transform_parameter = false;
5443 id.transform_lang_insert_block = NULL;
5444
5445 /* Walk the tree once to find local labels. */
5446 memset (&wi, 0, sizeof (wi));
5447 hash_set<tree> visited;
5448 wi.info = &id;
5449 wi.pset = &visited;
5450 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5451
5452 copy = gimple_seq_copy (seq);
5453
5454 /* Walk the copy, remapping decls. */
5455 memset (&wi, 0, sizeof (wi));
5456 wi.info = &id;
5457 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5458
5459 /* Clean up. */
5460 delete id.decl_map;
5461 if (id.debug_map)
5462 delete id.debug_map;
5463 if (id.dependence_map)
5464 {
5465 delete id.dependence_map;
5466 id.dependence_map = NULL;
5467 }
5468
5469 return copy;
5470 }
5471
5472
5473 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5474
5475 static tree
5476 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5477 {
5478 if (*tp == data)
5479 return (tree) data;
5480 else
5481 return NULL;
5482 }
5483
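/* Return true if SEARCH occurs anywhere within the tree TOP; intended to be
   called from gdb. */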
5484 DEBUG_FUNCTION bool
5485 debug_find_tree (tree top, tree search)
5486 {
5487 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5488 }
5489
5490
5491 /* Declare the variables created by the inliner. Add all the variables in
5492 VARS to BLOCK. */
5493
5494 static void
5495 declare_inline_vars (tree block, tree vars)
5496 {
5497 tree t;
5498 for (t = vars; t; t = DECL_CHAIN (t))
5499 {
5500 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5501 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5502 add_local_decl (cfun, t);
5503 }
5504
5505 if (block)
5506 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5507 }
5508
5509 /* Finish a copy COPY of DECL: copy the flags relevant for debug info, set
5510 the abstract origin, and give COPY the appropriate context when DECL was
5511 local to ID->src_fn. Return COPY. */
5512
5513 tree
5514 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5515 {
5516 /* Don't generate debug information for the copy if we wouldn't have
5517 generated it for the original either. */
5518 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5519 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5520
5521 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5522 declaration inspired this copy. */
5523 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5524
5525 /* The new variable/label has no RTL, yet. */
5526 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5527 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5528 SET_DECL_RTL (copy, 0);
5529 /* For vector typed decls make sure to update DECL_MODE according
5530 to the new function context. */
5531 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5532 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5533
5534 /* These args would always appear unused, if not for this. */
5535 TREE_USED (copy) = 1;
5536
5537 /* Set the context for the new declaration. */
5538 if (!DECL_CONTEXT (decl))
5539 /* Globals stay global. */
5540 ;
5541 else if (DECL_CONTEXT (decl) != id->src_fn)
5542 /* Things that weren't in the scope of the function we're inlining
5543 from aren't in the scope we're inlining to, either. */
5544 ;
5545 else if (TREE_STATIC (decl))
5546 /* Function-scoped static variables should stay in the original
5547 function. */
5548 ;
5549 else
5550 {
5551 /* Ordinary automatic local variables are now in the scope of the
5552 new function. */
5553 DECL_CONTEXT (copy) = id->dst_fn;
5554 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5555 {
5556 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5557 DECL_ATTRIBUTES (copy)
5558 = tree_cons (get_identifier ("omp simt private"), NULL,
5559 DECL_ATTRIBUTES (copy));
5560 id->dst_simt_vars->safe_push (copy);
5561 }
5562 }
5563
5564 return copy;
5565 }
5566
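/* Make a VAR_DECL copy of the PARM_DECL or RESULT_DECL DECL for use in the
   function ID is copying into, preserving the flags that matter, and finish
   it with copy_decl_for_dup_finish. */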
5567 static tree
5568 copy_decl_to_var (tree decl, copy_body_data *id)
5569 {
5570 tree copy, type;
5571
5572 gcc_assert (TREE_CODE (decl) == PARM_DECL
5573 || TREE_CODE (decl) == RESULT_DECL);
5574
5575 type = TREE_TYPE (decl);
5576
5577 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5578 VAR_DECL, DECL_NAME (decl), type);
5579 if (DECL_PT_UID_SET_P (decl))
5580 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5581 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5582 TREE_READONLY (copy) = TREE_READONLY (decl);
5583 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5584 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5585
5586 return copy_decl_for_dup_finish (id, decl, copy);
5587 }
5588
5589 /* Like copy_decl_to_var, but create a return slot object instead of a
5590 pointer variable for return by invisible reference. */
5591
5592 static tree
5593 copy_result_decl_to_var (tree decl, copy_body_data *id)
5594 {
5595 tree copy, type;
5596
5597 gcc_assert (TREE_CODE (decl) == PARM_DECL
5598 || TREE_CODE (decl) == RESULT_DECL);
5599
5600 type = TREE_TYPE (decl);
5601 if (DECL_BY_REFERENCE (decl))
5602 type = TREE_TYPE (type);
5603
5604 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5605 VAR_DECL, DECL_NAME (decl), type);
5606 if (DECL_PT_UID_SET_P (decl))
5607 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5608 TREE_READONLY (copy) = TREE_READONLY (decl);
5609 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5610 if (!DECL_BY_REFERENCE (decl))
5611 {
5612 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5613 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5614 }
5615
5616 return copy_decl_for_dup_finish (id, decl, copy);
5617 }
5618
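/* Copy DECL for the new function in ID without changing its kind, and finish
   it with copy_decl_for_dup_finish. */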
5619 tree
5620 copy_decl_no_change (tree decl, copy_body_data *id)
5621 {
5622 tree copy;
5623
5624 copy = copy_node (decl);
5625
5626 /* The COPY is not abstract; it will be generated in DST_FN. */
5627 DECL_ABSTRACT_P (copy) = false;
5628 lang_hooks.dup_lang_specific_decl (copy);
5629
5630 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5631 been taken; it's for internal bookkeeping in expand_goto_internal. */
5632 if (TREE_CODE (copy) == LABEL_DECL)
5633 {
5634 TREE_ADDRESSABLE (copy) = 0;
5635 LABEL_DECL_UID (copy) = -1;
5636 }
5637
5638 return copy_decl_for_dup_finish (id, decl, copy);
5639 }
5640
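/* Copy DECL for the new function in ID: PARM_DECLs and RESULT_DECLs become
   VAR_DECLs, everything else is copied unchanged. */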
5641 static tree
5642 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5643 {
5644 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5645 return copy_decl_to_var (decl, id);
5646 else
5647 return copy_decl_no_change (decl, id);
5648 }
5649
5650 /* Return a copy of the function's argument tree. */
5651 static tree
5652 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5653 bitmap args_to_skip, tree *vars)
5654 {
5655 tree arg, *parg;
5656 tree new_parm = NULL;
5657 int i = 0;
5658
5659 parg = &new_parm;
5660
5661 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5662 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5663 {
5664 tree new_tree = remap_decl (arg, id);
5665 if (TREE_CODE (new_tree) != PARM_DECL)
5666 new_tree = id->copy_decl (arg, id);
5667 lang_hooks.dup_lang_specific_decl (new_tree);
5668 *parg = new_tree;
5669 parg = &DECL_CHAIN (new_tree);
5670 }
5671 else if (!id->decl_map->get (arg))
5672 {
5673 /* Make an equivalent VAR_DECL. If the argument was used
5674 as a temporary variable later in the function, the uses will be
5675 replaced by the local variable. */
5676 tree var = copy_decl_to_var (arg, id);
5677 insert_decl_map (id, arg, var);
5678 /* Declare this new variable. */
5679 DECL_CHAIN (var) = *vars;
5680 *vars = var;
5681 }
5682 return new_parm;
5683 }
5684
5685 /* Return a copy of the function's static chain. */
5686 static tree
5687 copy_static_chain (tree static_chain, copy_body_data * id)
5688 {
5689 tree *chain_copy, *pvar;
5690
5691 chain_copy = &static_chain;
5692 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5693 {
5694 tree new_tree = remap_decl (*pvar, id);
5695 lang_hooks.dup_lang_specific_decl (new_tree);
5696 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5697 *pvar = new_tree;
5698 }
5699 return static_chain;
5700 }
5701
5702 /* Return true if the function is allowed to be versioned.
5703 This is a guard for the versioning functionality. */
5704
5705 bool
5706 tree_versionable_function_p (tree fndecl)
5707 {
5708 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5709 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5710 }
5711
5712 /* Update clone info after duplication. */
5713
5714 static void
5715 update_clone_info (copy_body_data * id)
5716 {
5717 struct cgraph_node *node;
5718 if (!id->dst_node->clones)
5719 return;
5720 for (node = id->dst_node->clones; node != id->dst_node;)
5721 {
5722 /* First update replace maps to match the new body. */
5723 if (node->clone.tree_map)
5724 {
5725 unsigned int i;
5726 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5727 {
5728 struct ipa_replace_map *replace_info;
5729 replace_info = (*node->clone.tree_map)[i];
5730 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5731 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5732 }
5733 }
5734 if (node->clones)
5735 node = node->clones;
5736 else if (node->next_sibling_clone)
5737 node = node->next_sibling_clone;
5738 else
5739 {
5740 while (node != id->dst_node && !node->next_sibling_clone)
5741 node = node->clone_of;
5742 if (node != id->dst_node)
5743 node = node->next_sibling_clone;
5744 }
5745 }
5746 }
5747
5748 /* Create a copy of a function's tree.
5749 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5750 of the original function and the new copied function
5751 respectively. In case we want to replace a DECL
5752 tree with another tree while duplicating the function's
5753 body, TREE_MAP represents the mapping between these
5754 trees. If UPDATE_CLONES is set, the call_stmt fields
5755 of edges of clones of the function will be updated.
5756
5757 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5758 from the new version.
5759 If SKIP_RETURN is true, the new version will return void.
5760 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5761 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5762 */
5763 void
5764 tree_function_versioning (tree old_decl, tree new_decl,
5765 vec<ipa_replace_map *, va_gc> *tree_map,
5766 bool update_clones, bitmap args_to_skip,
5767 bool skip_return, bitmap blocks_to_copy,
5768 basic_block new_entry)
5769 {
5770 struct cgraph_node *old_version_node;
5771 struct cgraph_node *new_version_node;
5772 copy_body_data id;
5773 tree p;
5774 unsigned i;
5775 struct ipa_replace_map *replace_info;
5776 basic_block old_entry_block, bb;
5777 auto_vec<gimple *, 10> init_stmts;
5778 tree vars = NULL_TREE;
5779 bitmap debug_args_to_skip = args_to_skip;
5780
5781 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5782 && TREE_CODE (new_decl) == FUNCTION_DECL);
5783 DECL_POSSIBLY_INLINED (old_decl) = 1;
5784
5785 old_version_node = cgraph_node::get (old_decl);
5786 gcc_checking_assert (old_version_node);
5787 new_version_node = cgraph_node::get (new_decl);
5788 gcc_checking_assert (new_version_node);
5789
5790 /* Copy over debug args. */
5791 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5792 {
5793 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5794 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5795 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5796 old_debug_args = decl_debug_args_lookup (old_decl);
5797 if (old_debug_args)
5798 {
5799 new_debug_args = decl_debug_args_insert (new_decl);
5800 *new_debug_args = vec_safe_copy (*old_debug_args);
5801 }
5802 }
5803
5804 /* Output the inlining info for this abstract function, since it has been
5805 inlined. If we don't do this now, we can lose the information about the
5806 variables in the function when the blocks get blown away as soon as we
5807 remove the cgraph node. */
5808 (*debug_hooks->outlining_inline_function) (old_decl);
5809
5810 DECL_ARTIFICIAL (new_decl) = 1;
5811 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5812 if (DECL_ORIGIN (old_decl) == old_decl)
5813 old_version_node->used_as_abstract_origin = true;
5814 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5815
5816 /* Prepare the data structures for the tree copy. */
5817 memset (&id, 0, sizeof (id));
5818
5819 /* Collect statements that will need folding after the body is copied. */
5820 id.statements_to_fold = new hash_set<gimple *>;
5821
5822 id.decl_map = new hash_map<tree, tree>;
5823 id.debug_map = NULL;
5824 id.src_fn = old_decl;
5825 id.dst_fn = new_decl;
5826 id.src_node = old_version_node;
5827 id.dst_node = new_version_node;
5828 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5829 id.blocks_to_copy = blocks_to_copy;
5830
5831 id.copy_decl = copy_decl_no_change;
5832 id.transform_call_graph_edges
5833 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5834 id.transform_new_cfg = true;
5835 id.transform_return_to_modify = false;
5836 id.transform_parameter = false;
5837 id.transform_lang_insert_block = NULL;
5838
5839 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5840 (DECL_STRUCT_FUNCTION (old_decl));
5841 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5842 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5843 initialize_cfun (new_decl, old_decl,
5844 new_entry ? new_entry->count : old_entry_block->count);
5845 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5846 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5847 = id.src_cfun->gimple_df->ipa_pta;
5848
5849 /* Copy the function's static chain. */
5850 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5851 if (p)
5852 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5853 = copy_static_chain (p, &id);
5854
5855 /* If there's a tree_map, prepare for substitution. */
5856 if (tree_map)
5857 for (i = 0; i < tree_map->length (); i++)
5858 {
5859 gimple *init;
5860 replace_info = (*tree_map)[i];
5861 if (replace_info->replace_p)
5862 {
5863 int parm_num = -1;
5864 if (!replace_info->old_tree)
5865 {
5866 int p = replace_info->parm_num;
5867 tree parm;
5868 tree req_type, new_type;
5869
5870 for (parm = DECL_ARGUMENTS (old_decl); p;
5871 parm = DECL_CHAIN (parm))
5872 p--;
5873 replace_info->old_tree = parm;
5874 parm_num = replace_info->parm_num;
5875 req_type = TREE_TYPE (parm);
5876 new_type = TREE_TYPE (replace_info->new_tree);
5877 if (!useless_type_conversion_p (req_type, new_type))
5878 {
5879 if (fold_convertible_p (req_type, replace_info->new_tree))
5880 replace_info->new_tree
5881 = fold_build1 (NOP_EXPR, req_type,
5882 replace_info->new_tree);
5883 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5884 replace_info->new_tree
5885 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5886 replace_info->new_tree);
5887 else
5888 {
5889 if (dump_file)
5890 {
5891 fprintf (dump_file, " const ");
5892 print_generic_expr (dump_file,
5893 replace_info->new_tree);
5894 fprintf (dump_file,
5895 " can't be converted to param ");
5896 print_generic_expr (dump_file, parm);
5897 fprintf (dump_file, "\n");
5898 }
5899 replace_info->old_tree = NULL;
5900 }
5901 }
5902 }
5903 else
5904 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5905 if (replace_info->old_tree)
5906 {
5907 init = setup_one_parameter (&id, replace_info->old_tree,
5908 replace_info->new_tree, id.src_fn,
5909 NULL,
5910 &vars);
5911 if (init)
5912 init_stmts.safe_push (init);
5913 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5914 {
5915 if (parm_num == -1)
5916 {
5917 tree parm;
5918 int p;
5919 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5920 parm = DECL_CHAIN (parm), p++)
5921 if (parm == replace_info->old_tree)
5922 {
5923 parm_num = p;
5924 break;
5925 }
5926 }
5927 if (parm_num != -1)
5928 {
5929 if (debug_args_to_skip == args_to_skip)
5930 {
5931 debug_args_to_skip = BITMAP_ALLOC (NULL);
5932 bitmap_copy (debug_args_to_skip, args_to_skip);
5933 }
5934 bitmap_clear_bit (debug_args_to_skip, parm_num);
5935 }
5936 }
5937 }
5938 }
5939 }
5940 /* Copy the function's arguments. */
5941 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5942 DECL_ARGUMENTS (new_decl)
5943 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5944 args_to_skip, &vars);
5945
5946 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5947 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5948
5949 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5950
5951 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5952 /* Add local vars. */
5953 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5954
5955 if (DECL_RESULT (old_decl) == NULL_TREE)
5956 ;
5957 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5958 {
5959 DECL_RESULT (new_decl)
5960 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5961 RESULT_DECL, NULL_TREE, void_type_node);
5962 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5963 cfun->returns_struct = 0;
5964 cfun->returns_pcc_struct = 0;
5965 }
5966 else
5967 {
5968 tree old_name;
5969 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5970 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5971 if (gimple_in_ssa_p (id.src_cfun)
5972 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5973 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5974 {
5975 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5976 insert_decl_map (&id, old_name, new_name);
5977 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5978 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5979 }
5980 }
5981
5982 /* Set up the destination function's loop tree. */
5983 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5984 {
5985 cfun->curr_properties &= ~PROP_loops;
5986 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5987 cfun->curr_properties |= PROP_loops;
5988 }
5989
5990 /* Copy the function's body. */
5991 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5992 new_entry);
5993
5994 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5995 number_blocks (new_decl);
5996
5997 /* We want to create the BB unconditionally, so that the addition of
5998 debug stmts doesn't affect BB count, which may in the end cause
5999 codegen differences. */
6000 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6001 while (init_stmts.length ())
6002 insert_init_stmt (&id, bb, init_stmts.pop ());
6003 update_clone_info (&id);
6004
6005 /* Remap the nonlocal_goto_save_area, if any. */
6006 if (cfun->nonlocal_goto_save_area)
6007 {
6008 struct walk_stmt_info wi;
6009
6010 memset (&wi, 0, sizeof (wi));
6011 wi.info = &id;
6012 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6013 }
6014
6015 /* Clean up. */
6016 delete id.decl_map;
6017 if (id.debug_map)
6018 delete id.debug_map;
6019 free_dominance_info (CDI_DOMINATORS);
6020 free_dominance_info (CDI_POST_DOMINATORS);
6021
6022 update_max_bb_count ();
6023 fold_marked_statements (0, id.statements_to_fold);
6024 delete id.statements_to_fold;
6025 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6026 if (id.dst_node->definition)
6027 cgraph_edge::rebuild_references ();
6028 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6029 {
6030 calculate_dominance_info (CDI_DOMINATORS);
6031 fix_loop_structure (NULL);
6032 }
6033 update_ssa (TODO_update_ssa);
6034
6035 /* After partial cloning we need to rescale frequencies, so they are
6036 within proper range in the cloned function. */
6037 if (new_entry)
6038 {
6039 struct cgraph_edge *e;
6040 rebuild_frequencies ();
6041
6042 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6043 for (e = new_version_node->callees; e; e = e->next_callee)
6044 {
6045 basic_block bb = gimple_bb (e->call_stmt);
6046 e->count = bb->count;
6047 }
6048 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6049 {
6050 basic_block bb = gimple_bb (e->call_stmt);
6051 e->count = bb->count;
6052 }
6053 }
6054
6055 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6056 {
6057 tree parm;
6058 vec<tree, va_gc> **debug_args = NULL;
6059 unsigned int len = 0;
6060 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6061 parm; parm = DECL_CHAIN (parm), i++)
6062 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6063 {
6064 tree ddecl;
6065
6066 if (debug_args == NULL)
6067 {
6068 debug_args = decl_debug_args_insert (new_decl);
6069 len = vec_safe_length (*debug_args);
6070 }
6071 ddecl = make_node (DEBUG_EXPR_DECL);
6072 DECL_ARTIFICIAL (ddecl) = 1;
6073 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6074 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6075 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6076 vec_safe_push (*debug_args, ddecl);
6077 }
6078 if (debug_args != NULL)
6079 {
6080 /* On the callee side, add
6081 DEBUG D#Y s=> parm
6082 DEBUG var => D#Y
6083 stmts to the first bb where var is a VAR_DECL created for the
6084 optimized away parameter in DECL_INITIAL block. This hints
6085 in the debug info that var (whose DECL_ORIGIN is the parm
6086 PARM_DECL) is optimized away, but could be looked up at the
6087 call site as value of D#X there. */
6088 tree var = vars, vexpr;
6089 gimple_stmt_iterator cgsi
6090 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6091 gimple *def_temp;
6092 var = vars;
6093 i = vec_safe_length (*debug_args);
6094 do
6095 {
6096 i -= 2;
6097 while (var != NULL_TREE
6098 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6099 var = TREE_CHAIN (var);
6100 if (var == NULL_TREE)
6101 break;
6102 vexpr = make_node (DEBUG_EXPR_DECL);
6103 parm = (**debug_args)[i];
6104 DECL_ARTIFICIAL (vexpr) = 1;
6105 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6106 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6107 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6108 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6109 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6110 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6111 }
6112 while (i > len);
6113 }
6114 }
6115
6116 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6117 BITMAP_FREE (debug_args_to_skip);
6118 free_dominance_info (CDI_DOMINATORS);
6119 free_dominance_info (CDI_POST_DOMINATORS);
6120
6121 gcc_assert (!id.debug_stmts.exists ());
6122 pop_cfun ();
6123 return;
6124 }
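
/* Illustrative sketch only (not part of the original sources): an IPA pass
   wanting a version of OLD_DECL in which the first parameter is replaced by
   a constant could, roughly, fill in an ipa_replace_map and then call
   tree_function_versioning along these lines (the ggc_alloc allocation here
   is only an assumption for the sketch; the field and argument names follow
   their uses in this file):

     vec<ipa_replace_map *, va_gc> *tree_map = NULL;
     struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->parm_num = 0;               // replace the first parameter
     map->old_tree = NULL_TREE;       // looked up from parm_num above
     map->new_tree = integer_zero_node;
     map->replace_p = true;
     vec_safe_push (tree_map, map);
     // Arguments: update_clones = false, args_to_skip = NULL,
     // skip_return = false, blocks_to_copy = NULL, new_entry = NULL.
     tree_function_versioning (old_decl, new_decl, tree_map,
                               false, NULL, false, NULL, NULL);

   Treat this as a sketch of the calling convention rather than the exact
   code used by the IPA passes. */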
6125
6126 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6127 the callee and return the inlined body on success. */
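/* For instance (purely illustrative): if FN is a "const" function whose saved
   body is essentially "return x * x;", then for the GENERIC call "fn (3)" the
   copied body reduces to a MODIFY_EXPR storing "3 * 3" into the return
   variable, and its right-hand side is what gets returned here. */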
6128
6129 tree
6130 maybe_inline_call_in_expr (tree exp)
6131 {
6132 tree fn = get_callee_fndecl (exp);
6133
6134 /* We can only try to inline "const" functions. */
6135 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6136 {
6137 call_expr_arg_iterator iter;
6138 copy_body_data id;
6139 tree param, arg, t;
6140 hash_map<tree, tree> decl_map;
6141
6142 /* Remap the parameters. */
6143 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6144 param;
6145 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6146 decl_map.put (param, arg);
6147
6148 memset (&id, 0, sizeof (id));
6149 id.src_fn = fn;
6150 id.dst_fn = current_function_decl;
6151 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6152 id.decl_map = &decl_map;
6153
6154 id.copy_decl = copy_decl_no_change;
6155 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6156 id.transform_new_cfg = false;
6157 id.transform_return_to_modify = true;
6158 id.transform_parameter = true;
6159 id.transform_lang_insert_block = NULL;
6160
6161 /* Make sure not to unshare trees behind the front-end's back
6162 since front-end specific mechanisms may rely on sharing. */
6163 id.regimplify = false;
6164 id.do_not_unshare = true;
6165
6166 /* We're not inside any EH region. */
6167 id.eh_lp_nr = 0;
6168
6169 t = copy_tree_body (&id);
6170
6171 /* We can only return something suitable for use in a GENERIC
6172 expression tree. */
6173 if (TREE_CODE (t) == MODIFY_EXPR)
6174 return TREE_OPERAND (t, 1);
6175 }
6176
6177 return NULL_TREE;
6178 }
6179
6180 /* Duplicate a type, fields and all. */
6181
6182 tree
6183 build_duplicate_type (tree type)
6184 {
6185 struct copy_body_data id;
6186
6187 memset (&id, 0, sizeof (id));
6188 id.src_fn = current_function_decl;
6189 id.dst_fn = current_function_decl;
6190 id.src_cfun = cfun;
6191 id.decl_map = new hash_map<tree, tree>;
6192 id.debug_map = NULL;
6193 id.copy_decl = copy_decl_no_change;
6194
6195 type = remap_type_1 (type, &id);
6196
6197 delete id.decl_map;
6198 if (id.debug_map)
6199 delete id.debug_map;
6200
6201 TYPE_CANONICAL (type) = type;
6202
6203 return type;
6204 }
6205
6206 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6207 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6208 evaluation. */
6209
6210 tree
6211 copy_fn (tree fn, tree& parms, tree& result)
6212 {
6213 copy_body_data id;
6214 tree param;
6215 hash_map<tree, tree> decl_map;
6216
6217 tree *p = &parms;
6218 *p = NULL_TREE;
6219
6220 memset (&id, 0, sizeof (id));
6221 id.src_fn = fn;
6222 id.dst_fn = current_function_decl;
6223 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6224 id.decl_map = &decl_map;
6225
6226 id.copy_decl = copy_decl_no_change;
6227 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6228 id.transform_new_cfg = false;
6229 id.transform_return_to_modify = false;
6230 id.transform_parameter = true;
6231 id.transform_lang_insert_block = NULL;
6232
6233 /* Make sure not to unshare trees behind the front-end's back
6234 since front-end specific mechanisms may rely on sharing. */
6235 id.regimplify = false;
6236 id.do_not_unshare = true;
6237
6238 /* We're not inside any EH region. */
6239 id.eh_lp_nr = 0;
6240
6241 /* Remap the parameters and result and return them to the caller. */
6242 for (param = DECL_ARGUMENTS (fn);
6243 param;
6244 param = DECL_CHAIN (param))
6245 {
6246 *p = remap_decl (param, &id);
6247 p = &DECL_CHAIN (*p);
6248 }
6249
6250 if (DECL_RESULT (fn))
6251 result = remap_decl (DECL_RESULT (fn), &id);
6252 else
6253 result = NULL_TREE;
6254
6255 return copy_tree_body (&id);
6256 }