/* Tree inlining.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
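
/* A rough illustration of the inlining transform described above (an
   editor's sketch with made-up names, not taken from GCC's actual
   output): inlining

     int f (int x) { return x + 1; }

   at a call site `y = f (3);' duplicates the body of `f', remaps the
   PARM_DECL `x' to a new local initialized from the argument, and
   turns the RETURN_EXPR into an assignment to a returned-value
   variable, roughly

     x.1 = 3;
     retval.2 = x.1 + 1;
     y = retval.2;  */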

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree,
                                     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}
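
/* For example (illustrative names only, not from the sources):
   remapping PARM_DECL `x' to its new VAR_DECL `x.1' stores both
   x -> x.1 and the identity x.1 -> x.1, so a later walk that
   encounters the fresh `x.1' maps it to itself instead of creating
   yet another copy.  */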

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (VAR_P (value));

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple *def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = id->decl_map->get (val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && VAR_P (var)
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && POINTER_TYPE_P (TREE_TYPE (name))
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      return new_tree;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable during
     inlining: this saves us from the need to introduce a PHI node in case
     the return value is only partly initialized.  */
  if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && POINTER_TYPE_P (TREE_TYPE (name))
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining a function having an uninitialized variable, we might
             extend its lifetime (the variable might get reused).  This causes
             an ICE if we end up extending the lifetime of an SSA name across
             an abnormal edge, and also increases register pressure.

             We simply initialize all uninitialized vars by 0, except for the
             case where we are inlining into the very first BB.  We can avoid
             this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                                             0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple *init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to duplicate
     a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        {
          DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

          /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
             which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
             is not set on the TYPE_DECL, for example in LTO mode.  */
          if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
            {
              tree x = build_variant_type_copy (TREE_TYPE (t));
              TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
              TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
              DECL_ORIGINAL_TYPE (t) = x;
            }
        }

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing between the type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_MIN_VALUE (type)
                               == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
          gcc_checking_assert (TYPE_MAX_VALUE (type)
                               == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

          TYPE_MIN_VALUE (new_tree)
            = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
          TYPE_MAX_VALUE (new_tree)
            = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        {
          t = TYPE_MIN_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

          t = TYPE_MAX_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
        }
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
        TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
        walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_DOMAIN (type)
                               == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
          TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
          && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
        TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
        {
          tree f, nf = NULL;

          for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
            {
              t = remap_decl (f, id);
              DECL_CONTEXT (t) = new_tree;
              DECL_CHAIN (t) = nf;
              nf = t;
            }
          TYPE_FIELDS (new_tree) = nreverse (nf);
        }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of a type share the same size, so use the already remapped
     data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
                            && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
                           || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
                            && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
                           || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
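  /* For instance (an illustrative aside, not from the sources), a VLA
     type such as `int[n]' with `n' local to SRC_FN is variably
     modified and must be remapped, while a plain `int' is returned
     unchanged via the identity mapping below.  */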
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
             copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          /* We need to add this variable to the local decls as otherwise
             nothing else will do so.  */
          if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple *new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
        return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0 || processing_debug_stmt)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
        SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
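          /* E.g. (an editor's sketch, not from the sources) after
             substituting `&a' for parameter `p', a copied `MEM[p, 0]'
             is rebuilt with fold_build2 below so it can collapse to a
             direct reference to `a' instead of keeping the `*&a'
             form.  */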
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
                = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
              MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
            }
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  */
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
        {
          tree *n;
          n = id->decl_map->get (TREE_BLOCK (*tp));
          if (n)
            new_block = *n;
        }
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = id->decl_map->get (decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
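          /* E.g. (illustrative, not from the sources) inlining a call
             `f (&x)' substitutes `&x' for the parameter, so a use `*p'
             in the body first becomes `*&x' and is folded back to
             plain `x' here.  */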
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n = id->decl_map->get (decl);
          if (n)
            {
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (*tp);
              tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
              tree old = *tp;
              *tp = gimple_fold_indirect_ref (ptr);
              if (! *tp)
                {
                  type = remap_type (type, id);
                  if (TREE_CODE (ptr) == ADDR_EXPR)
                    {
                      *tp
                        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (ptr, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, ptr);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
                         have remapped a parameter as the property might be
                         valid only for the parameter itself.  */
                      if (TREE_THIS_NOTRAP (old)
                          && (!is_parm (TREE_OPERAND (old, 0))
                              || (!id->transform_parameter && is_parm (ptr))))
                        TREE_THIS_NOTRAP (*tp) = 1;
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, copy_tree_body_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
                = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
              MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
            }
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = id->decl_map->get (TREE_BLOCK (*tp));
              if (n)
                new_block = *n;
            }
          TREE_SET_BLOCK (*tp, new_block);
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            {
              tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
              if (TREE_TYPE (t) != TREE_TYPE (*tp))
                t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
              *tp = t;
            }
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy, as a sequence.  */

static gimple_seq
remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
  gimple *copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  if (is_gimple_debug (stmt)
      && (gimple_debug_nonbind_marker_p (stmt)
          ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
          : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
    return stmts;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
      tree retbnd = gimple_return_retbnd (stmt);
      tree bndslot = id->retbnd;

      if (retbnd && bndslot)
        {
          gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
          memset (&wi, 0, sizeof (wi));
          wi.info = id;
          walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
          gimple_seq_add_stmt (&stmts, bndcopy);
        }

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || ! SSA_NAME_VAR (retval)
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->do_not_unshare
                                      ? id->retvar : unshare_expr (id->retvar),
                                      retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return stmts;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
          break;

        case GIMPLE_CATCH:
          {
            gcatch *catch_stmt = as_a <gcatch *> (stmt);
            s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
            copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
          }
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          {
            gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
            s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
            copy = gimple_build_omp_parallel
                     (s1,
                      gimple_omp_parallel_clauses (omp_par_stmt),
                      gimple_omp_parallel_child_fn (omp_par_stmt),
                      gimple_omp_parallel_data_arg (omp_par_stmt));
          }
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
                                       gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_TASKGROUP:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_taskgroup (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered
                   (s1,
                    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_TARGET:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_target
                   (s1, gimple_omp_target_kind (stmt),
                    gimple_omp_target_clauses (stmt));
          break;

        case GIMPLE_OMP_TEAMS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_teams
                   (s1, gimple_omp_teams_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_critical (s1,
                                            gimple_omp_critical_name
                                              (as_a <gomp_critical *> (stmt)),
                                            gimple_omp_critical_clauses
                                              (as_a <gomp_critical *> (stmt)));
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
            gtransaction *new_trans_stmt;
            s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
                                   id);
            copy = new_trans_stmt = gimple_build_transaction (s1);
            gimple_transaction_set_subcode (new_trans_stmt,
              gimple_transaction_subcode (old_trans_stmt));
            gimple_transaction_set_label_norm (new_trans_stmt,
              gimple_transaction_label_norm (old_trans_stmt));
            gimple_transaction_set_label_uninst (new_trans_stmt,
              gimple_transaction_label_uninst (old_trans_stmt));
            gimple_transaction_set_label_over (new_trans_stmt,
              gimple_transaction_label_over (old_trans_stmt));
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = id->decl_map->get (decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return NULL;
            }
        }

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
         in a block that we aren't copying during tree_function_versioning,
         just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (TREE_CODE (lhs) == MEM_REF
              && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
            {
              gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
              if (gimple_bb (def_stmt)
                  && !bitmap_bit_p (id->blocks_to_copy,
                                    gimple_bb (def_stmt)->index))
                return NULL;
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          gdebug *copy
            = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                       gimple_debug_bind_get_value (stmt),
                                       stmt);
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          gdebug *copy = gimple_build_debug_source_bind
                           (gimple_debug_source_bind_get_var (stmt),
                            gimple_debug_source_bind_get_value (stmt),
                            stmt);
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
        }
      if (gimple_debug_nonbind_marker_p (stmt))
        {
          /* If the inlined function has too many debug markers,
             don't copy them.  */
          if (id->src_cfun->debug_marker_count
              > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
            return stmts;

          gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
        }
      gcc_checking_assert (!is_gimple_debug (stmt));

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
        {
          if (gimple_call_tail_p (call_stmt))
            gimple_call_set_tail (call_stmt, false);
          if (gimple_call_from_thunk_p (call_stmt))
            gimple_call_set_from_thunk (call_stmt, false);
          if (gimple_call_internal_p (call_stmt))
            switch (gimple_call_internal_fn (call_stmt))
              {
              case IFN_GOMP_SIMD_LANE:
              case IFN_GOMP_SIMD_VF:
              case IFN_GOMP_SIMD_LAST_LANE:
              case IFN_GOMP_SIMD_ORDERED_START:
              case IFN_GOMP_SIMD_ORDERED_END:
                DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
                break;
              default:
                break;
              }
        }

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (as_a <gcall *> (copy));
            }
            break;

          case GIMPLE_RESX:
            {
              gresx *resx_stmt = as_a <gresx *> (copy);
              int r = gimple_resx_region (resx_stmt);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (resx_stmt, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
              int r = gimple_eh_dispatch_region (eh_dispatch);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (eh_dispatch, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy)
      || gimple_debug_nonbind_marker_p (copy))
    {
      gimple_seq_add_stmt (&stmts, copy);
      return stmts;
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  gimple_seq_add_stmt (&stmts, copy);
  return stmts;
}


1772 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1773 later. */
1774
1775 static basic_block
1776 copy_bb (copy_body_data *id, basic_block bb,
1777 profile_count num, profile_count den)
1778 {
1779 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1780 basic_block copy_basic_block;
1781 tree decl;
1782 basic_block prev;
1783
1784 profile_count::adjust_for_ipa_scaling (&num, &den);
1785
1786 /* Search for previous copied basic block. */
1787 prev = bb->prev_bb;
1788 while (!prev->aux)
1789 prev = prev->prev_bb;
1790
1791 /* create_basic_block() will append every new block to
1792 basic_block_info automatically. */
1793 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1794 copy_basic_block->count = bb->count.apply_scale (num, den);
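/* Worked example with made-up numbers: if the callee's entry count DEN
   is 1000 and the scaled call-site count NUM is 250, a source block
   with count 400 receives 400 * 250 / 1000 = 100 in the copy. */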
1795
1796 copy_gsi = gsi_start_bb (copy_basic_block);
1797
1798 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1799 {
1800 gimple_seq stmts;
1801 gimple *stmt = gsi_stmt (gsi);
1802 gimple *orig_stmt = stmt;
1803 gimple_stmt_iterator stmts_gsi;
1804 bool stmt_added = false;
1805
1806 id->regimplify = false;
1807 stmts = remap_gimple_stmt (stmt, id);
1808
1809 if (gimple_seq_empty_p (stmts))
1810 continue;
1811
1812 seq_gsi = copy_gsi;
1813
1814 for (stmts_gsi = gsi_start (stmts);
1815 !gsi_end_p (stmts_gsi); )
1816 {
1817 stmt = gsi_stmt (stmts_gsi);
1818
1819 /* Advance iterator now before stmt is moved to seq_gsi. */
1820 gsi_next (&stmts_gsi);
1821
1822 if (gimple_nop_p (stmt))
1823 continue;
1824
1825 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1826 orig_stmt);
1827
1828 /* With the return slot optimization we can end up with a
1829 non-gimple (foo *)&this->m; fix that here. */
1830 if (is_gimple_assign (stmt)
1831 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1832 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1833 {
1834 tree new_rhs;
1835 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1836 gimple_assign_rhs1 (stmt),
1837 true, NULL, false,
1838 GSI_CONTINUE_LINKING);
1839 gimple_assign_set_rhs1 (stmt, new_rhs);
1840 id->regimplify = false;
1841 }
1842
1843 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1844
1845 if (id->regimplify)
1846 gimple_regimplify_operands (stmt, &seq_gsi);
1847
1848 stmt_added = true;
1849 }
1850
1851 if (!stmt_added)
1852 continue;
1853
1854 /* If copy_basic_block was empty at the start of this iteration,
1855 call gsi_start_bb again to get at the newly added statements. */
1856 if (gsi_end_p (copy_gsi))
1857 copy_gsi = gsi_start_bb (copy_basic_block);
1858 else
1859 gsi_next (&copy_gsi);
1860
1861 /* Process the new statement. The call to gimple_regimplify_operands
1862 possibly turned the statement into multiple statements; we
1863 need to process all of them. */
1864 do
1865 {
1866 tree fn;
1867 gcall *call_stmt;
1868
1869 stmt = gsi_stmt (copy_gsi);
1870 call_stmt = dyn_cast <gcall *> (stmt);
1871 if (call_stmt
1872 && gimple_call_va_arg_pack_p (call_stmt)
1873 && id->call_stmt
1874 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1875 {
1876 /* __builtin_va_arg_pack () should be replaced by
1877 all arguments corresponding to ... in the caller. */
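/* A minimal sketch of the user-level pattern this rewrites (modeled on
   the GCC manual; myprintf is a hypothetical name):

     extern inline __attribute__ ((__gnu_inline__, __always_inline__))
     int myprintf (FILE *f, const char *fmt, ...)
     {
       return fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   Inlining the call myprintf (stderr, "%d-%d", 1, 2) rewrites the
   inner call to fprintf (stderr, "%d-%d", 1, 2); NARGS below would be
   2, the number of arguments matching the "...". */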
1878 tree p;
1879 gcall *new_call;
1880 vec<tree> argarray;
1881 size_t nargs = gimple_call_num_args (id->call_stmt);
1882 size_t n;
1883
1884 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1885 nargs--;
1886
1887 /* Create the new array of arguments. */
1888 n = nargs + gimple_call_num_args (call_stmt);
1889 argarray.create (n);
1890 argarray.safe_grow_cleared (n);
1891
1892 /* Copy all the arguments before '...' */
1893 memcpy (argarray.address (),
1894 gimple_call_arg_ptr (call_stmt, 0),
1895 gimple_call_num_args (call_stmt) * sizeof (tree));
1896
1897 /* Append the arguments passed in '...' */
1898 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1899 gimple_call_arg_ptr (id->call_stmt, 0)
1900 + (gimple_call_num_args (id->call_stmt) - nargs),
1901 nargs * sizeof (tree));
1902
1903 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1904 argarray);
1905
1906 argarray.release ();
1907
1908 /* Copy all GIMPLE_CALL flags, location and block, except
1909 GF_CALL_VA_ARG_PACK. */
1910 gimple_call_copy_flags (new_call, call_stmt);
1911 gimple_call_set_va_arg_pack (new_call, false);
1912 gimple_set_location (new_call, gimple_location (stmt));
1913 gimple_set_block (new_call, gimple_block (stmt));
1914 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1915
1916 gsi_replace (&copy_gsi, new_call, false);
1917 stmt = new_call;
1918 }
1919 else if (call_stmt
1920 && id->call_stmt
1921 && (decl = gimple_call_fndecl (stmt))
1922 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1923 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
1924 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1925 {
1926 /* __builtin_va_arg_pack_len () should be replaced by
1927 the number of anonymous arguments. */
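/* E.g. with the myprintf sketch above, inlining the call
   myprintf (stderr, "%d-%d", 1, 2) folds __builtin_va_arg_pack_len ()
   to the constant 2. */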
1928 size_t nargs = gimple_call_num_args (id->call_stmt);
1929 tree count, p;
1930 gimple *new_stmt;
1931
1932 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1933 nargs--;
1934
1935 count = build_int_cst (integer_type_node, nargs);
1936 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1937 gsi_replace (&copy_gsi, new_stmt, false);
1938 stmt = new_stmt;
1939 }
1940 else if (call_stmt
1941 && id->call_stmt
1942 && gimple_call_internal_p (stmt)
1943 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1944 {
1945 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1946 gsi_remove (&copy_gsi, false);
1947 continue;
1948 }
1949
1950 /* Statements produced by inlining can be unfolded, especially
1951 when we have constant propagated some operands. We can't fold
1952 them right now for two reasons:
1953 1) folding requires SSA_NAME_DEF_STMTs to be correct
1954 2) we can't change function calls to builtins.
1955 So we just mark the statement for later folding. We mark
1956 all new statements, instead of just the statements that have
1957 changed by some nontrivial substitution, so even statements
1958 made foldable indirectly are updated. If this turns out to be
1959 expensive, copy_body can be told to watch for nontrivial
1960 changes. */
1961 if (id->statements_to_fold)
1962 id->statements_to_fold->add (stmt);
1963
1964 /* We're duplicating a CALL_EXPR. Find any corresponding
1965 callgraph edges and update or duplicate them. */
1966 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1967 {
1968 struct cgraph_edge *edge;
1969
1970 switch (id->transform_call_graph_edges)
1971 {
1972 case CB_CGE_DUPLICATE:
1973 edge = id->src_node->get_edge (orig_stmt);
1974 if (edge)
1975 {
1976 struct cgraph_edge *old_edge = edge;
1977 profile_count old_cnt = edge->count;
1978 edge = edge->clone (id->dst_node, call_stmt,
1979 gimple_uid (stmt),
1980 num, den,
1981 true);
1982
1983 /* Speculative calls consist of two edges - direct and
1984 indirect. Duplicate the whole thing and distribute
1985 frequencies accordingly. */
1986 if (edge->speculative)
1987 {
1988 struct cgraph_edge *direct, *indirect;
1989 struct ipa_ref *ref;
1990
1991 gcc_assert (!edge->indirect_unknown_callee);
1992 old_edge->speculative_call_info (direct, indirect, ref);
1993
1994 profile_count indir_cnt = indirect->count;
1995 indirect = indirect->clone (id->dst_node, call_stmt,
1996 gimple_uid (stmt),
1997 num, den,
1998 true);
1999
2000 profile_probability prob
2001 = indir_cnt.probability_in (old_cnt + indir_cnt);
2002 indirect->count
2003 = copy_basic_block->count.apply_probability (prob);
2004 edge->count = copy_basic_block->count - indirect->count;
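/* Worked example with made-up counts: if the original direct edge had
   count 90 and the indirect edge count 10, PROB is 10%; a copied
   block with count 50 then gets indirect count 5 and direct count
   45. */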
2005 id->dst_node->clone_reference (ref, stmt);
2006 }
2007 else
2008 edge->count = copy_basic_block->count;
2009 }
2010 break;
2011
2012 case CB_CGE_MOVE_CLONES:
2013 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2014 call_stmt);
2015 edge = id->dst_node->get_edge (stmt);
2016 break;
2017
2018 case CB_CGE_MOVE:
2019 edge = id->dst_node->get_edge (orig_stmt);
2020 if (edge)
2021 edge->set_call_stmt (call_stmt);
2022 break;
2023
2024 default:
2025 gcc_unreachable ();
2026 }
2027
2028 /* Constant propagation on arguments done during inlining
2029 may create a new direct call. Produce an edge for it. */
2030 if ((!edge
2031 || (edge->indirect_inlining_edge
2032 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2033 && id->dst_node->definition
2034 && (fn = gimple_call_fndecl (stmt)) != NULL)
2035 {
2036 struct cgraph_node *dest = cgraph_node::get_create (fn);
2037
2038 /* We have a missing edge in the callgraph. This can happen
2039 when previous inlining turned an indirect call into a
2040 direct call by constant propagating arguments, or when we
2041 are producing a dead clone (for further cloning). In all
2042 other cases we hit a bug (incorrect node sharing is the
2043 most common reason for missing edges). */
2044 gcc_assert (!dest->definition
2045 || dest->address_taken
2046 || !id->src_node->definition
2047 || !id->dst_node->definition);
2048 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2049 id->dst_node->create_edge_including_clones
2050 (dest, orig_stmt, call_stmt, bb->count,
2051 CIF_ORIGINALLY_INDIRECT_CALL);
2052 else
2053 id->dst_node->create_edge (dest, call_stmt,
2054 bb->count)->inline_failed
2055 = CIF_ORIGINALLY_INDIRECT_CALL;
2056 if (dump_file)
2057 {
2058 fprintf (dump_file, "Created new direct edge to %s\n",
2059 dest->name ());
2060 }
2061 }
2062
2063 notice_special_calls (as_a <gcall *> (stmt));
2064 }
2065
2066 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2067 id->eh_map, id->eh_lp_nr);
2068
2069 gsi_next (&copy_gsi);
2070 }
2071 while (!gsi_end_p (copy_gsi));
2072
2073 copy_gsi = gsi_last_bb (copy_basic_block);
2074 }
2075
2076 return copy_basic_block;
2077 }
2078
2079 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2080 SSA form is quite easy, since the dominator relationship for the old
2081 basic blocks does not change.
2082
2083 There is, however, an exception where inlining might change the
2084 dominator relation across EH edges from a basic block within the
2085 inlined function to landing pads in the function we inline into.
2086
2087 The function fills in PHI_RESULTs of such PHI nodes if they refer
2088 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2089 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2090 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2091 set, and this means that there will be no overlapping live ranges
2092 for the underlying symbol.
2093
2094 This might change in the future if we allow redirecting of EH edges;
2095 we might then want to change the way we build the CFG pre-inlining
2096 to include all the possible edges. */
2097 static void
2098 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2099 bool can_throw, bool nonlocal_goto)
2100 {
2101 edge e;
2102 edge_iterator ei;
2103
2104 FOR_EACH_EDGE (e, ei, bb->succs)
2105 if (!e->dest->aux
2106 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2107 {
2108 gphi *phi;
2109 gphi_iterator si;
2110
2111 if (!nonlocal_goto)
2112 gcc_assert (e->flags & EDGE_EH);
2113
2114 if (!can_throw)
2115 gcc_assert (!(e->flags & EDGE_EH));
2116
2117 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2118 {
2119 edge re;
2120
2121 phi = si.phi ();
2122
2123 /* For abnormal goto/call edges the receiver can be the
2124 ENTRY_BLOCK. Do not assert this cannot happen. */
2125
2126 gcc_assert ((e->flags & EDGE_EH)
2127 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2128
2129 re = find_edge (ret_bb, e->dest);
2130 gcc_checking_assert (re);
2131 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2132 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2133
2134 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2135 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2136 }
2137 }
2138 }
2139
2140
2141 /* Copy edges from BB into its copy constructed earlier, scaling the
2142 profile accordingly. Assume the aux pointers point to the copies
2143 of each BB. Return true if any debug stmts are left after a
2144 statement that must end the basic block. */
2145
2146 static bool
2147 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2148 basic_block ret_bb, basic_block abnormal_goto_dest)
2149 {
2150 basic_block new_bb = (basic_block) bb->aux;
2151 edge_iterator ei;
2152 edge old_edge;
2153 gimple_stmt_iterator si;
2154 int flags;
2155 bool need_debug_cleanup = false;
2156
2157 /* Use the indices from the original blocks to create edges for the
2158 new ones. */
2159 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2160 if (!(old_edge->flags & EDGE_EH))
2161 {
2162 edge new_edge;
2163
2164 flags = old_edge->flags;
2165
2166 /* Return edges do get a FALLTHRU flag when they get inlined. */
2167 if (old_edge->dest->index == EXIT_BLOCK
2168 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2169 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2170 flags |= EDGE_FALLTHRU;
2171 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2172 new_edge->probability = old_edge->probability;
2173 }
2174
2175 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2176 return false;
2177
2178 /* When doing function splitting, we must decrease the count of the
2179 return block that was previously reachable from blocks we did not copy. */
2180 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2181 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2182 if (old_edge->src->index != ENTRY_BLOCK
2183 && !old_edge->src->aux)
2184 new_bb->count -= old_edge->count ().apply_scale (num, den);
2185
2186 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2187 {
2188 gimple *copy_stmt;
2189 bool can_throw, nonlocal_goto;
2190
2191 copy_stmt = gsi_stmt (si);
2192 if (!is_gimple_debug (copy_stmt))
2193 update_stmt (copy_stmt);
2194
2195 /* Do this before the possible split_block. */
2196 gsi_next (&si);
2197
2198 /* If this tree could throw an exception, there are two
2199 cases where we need to add abnormal edge(s): the
2200 tree wasn't in a region and there is a "current
2201 region" in the caller; or the original tree had
2202 EH edges. In both cases split the block after the tree,
2203 and add abnormal edge(s) as needed; we need both
2204 those from the callee and the caller.
2205 We check whether the copy can throw, because the const
2206 propagation can change an INDIRECT_REF which throws
2207 into a COMPONENT_REF which doesn't. If the copy
2208 can throw, the original could also throw. */
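/* For illustration (hypothetical GIMPLE): propagating p_1 = &s.f into
   the potentially trapping load x_2 = *p_1 yields x_2 = s.f, a
   component access that can no longer trap. */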
2209 can_throw = stmt_can_throw_internal (copy_stmt);
2210 nonlocal_goto
2211 = (stmt_can_make_abnormal_goto (copy_stmt)
2212 && !computed_goto_p (copy_stmt));
2213
2214 if (can_throw || nonlocal_goto)
2215 {
2216 if (!gsi_end_p (si))
2217 {
2218 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2219 gsi_next (&si);
2220 if (gsi_end_p (si))
2221 need_debug_cleanup = true;
2222 }
2223 if (!gsi_end_p (si))
2224 /* Note that bb's predecessor edges aren't necessarily
2225 right at this point; split_block doesn't care. */
2226 {
2227 edge e = split_block (new_bb, copy_stmt);
2228
2229 new_bb = e->dest;
2230 new_bb->aux = e->src->aux;
2231 si = gsi_start_bb (new_bb);
2232 }
2233 }
2234
2235 bool update_probs = false;
2236
2237 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2238 {
2239 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2240 update_probs = true;
2241 }
2242 else if (can_throw)
2243 {
2244 make_eh_edges (copy_stmt);
2245 update_probs = true;
2246 }
2247
2248 /* EH edges may not match old edges. Copy as much as possible. */
2249 if (update_probs)
2250 {
2251 edge e;
2252 edge_iterator ei;
2253 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2254
2255 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2256 if ((old_edge->flags & EDGE_EH)
2257 && (e = find_edge (copy_stmt_bb,
2258 (basic_block) old_edge->dest->aux))
2259 && (e->flags & EDGE_EH))
2260 e->probability = old_edge->probability;
2261
2262 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2263 if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
2264 e->probability = profile_probability::never ();
2265 }
2266
2267
2268 /* If the call we inline cannot make an abnormal goto, do not add
2269 additional abnormal edges but only retain those already present
2270 in the original function body. */
2271 if (abnormal_goto_dest == NULL)
2272 nonlocal_goto = false;
2273 if (nonlocal_goto)
2274 {
2275 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2276
2277 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2278 nonlocal_goto = false;
2279 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2280 in OpenMP regions which aren't allowed to be left abnormally.
2281 So, no need to add abnormal edge in that case. */
2282 else if (is_gimple_call (copy_stmt)
2283 && gimple_call_internal_p (copy_stmt)
2284 && (gimple_call_internal_fn (copy_stmt)
2285 == IFN_ABNORMAL_DISPATCHER)
2286 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2287 nonlocal_goto = false;
2288 else
2289 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2290 EDGE_ABNORMAL);
2291 }
2292
2293 if ((can_throw || nonlocal_goto)
2294 && gimple_in_ssa_p (cfun))
2295 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2296 can_throw, nonlocal_goto);
2297 }
2298 return need_debug_cleanup;
2299 }
2300
2301 /* Copy the PHIs. All blocks and edges have been copied, some blocks
2302 were possibly split and new outgoing EH edges inserted.
2303 BB points to the block of the original function and the AUX pointers
2304 link the original and newly copied blocks. */
2305
2306 static void
2307 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2308 {
2309 basic_block const new_bb = (basic_block) bb->aux;
2310 edge_iterator ei;
2311 gphi *phi;
2312 gphi_iterator si;
2313 edge new_edge;
2314 bool inserted = false;
2315
2316 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2317 {
2318 tree res, new_res;
2319 gphi *new_phi;
2320
2321 phi = si.phi ();
2322 res = PHI_RESULT (phi);
2323 new_res = res;
2324 if (!virtual_operand_p (res))
2325 {
2326 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2327 if (EDGE_COUNT (new_bb->preds) == 0)
2328 {
2329 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2330 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2331 }
2332 else
2333 {
2334 new_phi = create_phi_node (new_res, new_bb);
2335 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2336 {
2337 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2338 bb);
2339 tree arg;
2340 tree new_arg;
2341 edge_iterator ei2;
2342 location_t locus;
2343
2344 /* When doing partial cloning, we allow PHIs on the entry
2345 block as long as all the arguments are the same.
2346 Find any incoming edge to see which argument to copy. */
2347 if (!old_edge)
2348 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2349 if (!old_edge->src->aux)
2350 break;
2351
2352 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2353 new_arg = arg;
2354 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2355 gcc_assert (new_arg);
2356 /* With the return slot optimization we can end up with a
2357 non-gimple (foo *)&this->m; fix that here. */
2358 if (TREE_CODE (new_arg) != SSA_NAME
2359 && TREE_CODE (new_arg) != FUNCTION_DECL
2360 && !is_gimple_val (new_arg))
2361 {
2362 gimple_seq stmts = NULL;
2363 new_arg = force_gimple_operand (new_arg, &stmts, true,
2364 NULL);
2365 gsi_insert_seq_on_edge (new_edge, stmts);
2366 inserted = true;
2367 }
2368 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2369 if (LOCATION_BLOCK (locus))
2370 {
2371 tree *n;
2372 n = id->decl_map->get (LOCATION_BLOCK (locus));
2373 gcc_assert (n);
2374 locus = set_block (locus, *n);
2375 }
2376 else
2377 locus = LOCATION_LOCUS (locus);
2378
2379 add_phi_arg (new_phi, new_arg, new_edge, locus);
2380 }
2381 }
2382 }
2383 }
2384
2385 /* Commit the delayed edge insertions. */
2386 if (inserted)
2387 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2388 gsi_commit_one_edge_insert (new_edge, NULL);
2389 }
2390
2391
2392 /* Wrapper for remap_decl so it can be used as a callback. */
2393
2394 static tree
2395 remap_decl_1 (tree decl, void *data)
2396 {
2397 return remap_decl (decl, (copy_body_data *) data);
2398 }
2399
2400 /* Build the struct function and associated data structures for the new
2401 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. This
2402 function changes cfun to that of NEW_FNDECL (and current_function_decl too). */
2403
2404 static void
2405 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2406 {
2407 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2408
2409 if (!DECL_ARGUMENTS (new_fndecl))
2410 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2411 if (!DECL_RESULT (new_fndecl))
2412 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2413
2414 /* Register specific tree functions. */
2415 gimple_register_cfg_hooks ();
2416
2417 /* Get clean struct function. */
2418 push_struct_function (new_fndecl);
2419
2420 /* We will rebuild these, so just sanity check that they are empty. */
2421 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2422 gcc_assert (cfun->local_decls == NULL);
2423 gcc_assert (cfun->cfg == NULL);
2424 gcc_assert (cfun->decl == new_fndecl);
2425
2426 /* Copy items we preserve during cloning. */
2427 cfun->static_chain_decl = src_cfun->static_chain_decl;
2428 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2429 cfun->function_end_locus = src_cfun->function_end_locus;
2430 cfun->curr_properties = src_cfun->curr_properties;
2431 cfun->last_verified = src_cfun->last_verified;
2432 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2433 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2434 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2435 cfun->stdarg = src_cfun->stdarg;
2436 cfun->after_inlining = src_cfun->after_inlining;
2437 cfun->can_throw_non_call_exceptions
2438 = src_cfun->can_throw_non_call_exceptions;
2439 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2440 cfun->returns_struct = src_cfun->returns_struct;
2441 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2442
2443 init_empty_tree_cfg ();
2444
2445 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2446
2447 profile_count num = count;
2448 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2449 profile_count::adjust_for_ipa_scaling (&num, &den);
2450
2451 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2452 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2453 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2454 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2455 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2456 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2457 if (src_cfun->eh)
2458 init_eh_for_function ();
2459
2460 if (src_cfun->gimple_df)
2461 {
2462 init_tree_ssa (cfun);
2463 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2464 if (cfun->gimple_df->in_ssa_p)
2465 init_ssa_operands (cfun);
2466 }
2467 }
2468
2469 /* Helper function for copy_cfg_body. Move debug stmts from the end
2470 of NEW_BB to the beginning of successor basic blocks when needed. If the
2471 successor has multiple predecessors, reset them, otherwise keep
2472 their value. */
2473
2474 static void
2475 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2476 {
2477 edge e;
2478 edge_iterator ei;
2479 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2480
2481 if (gsi_end_p (si)
2482 || gsi_one_before_end_p (si)
2483 || !(stmt_can_throw_internal (gsi_stmt (si))
2484 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2485 return;
2486
2487 FOR_EACH_EDGE (e, ei, new_bb->succs)
2488 {
2489 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2490 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2491 while (is_gimple_debug (gsi_stmt (ssi)))
2492 {
2493 gimple *stmt = gsi_stmt (ssi);
2494 gdebug *new_stmt;
2495 tree var;
2496 tree value;
2497
2498 /* For the last edge move the debug stmts instead of copying
2499 them. */
2500 if (ei_one_before_end_p (ei))
2501 {
2502 si = ssi;
2503 gsi_prev (&ssi);
2504 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2505 gimple_debug_bind_reset_value (stmt);
2506 gsi_remove (&si, false);
2507 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2508 continue;
2509 }
2510
2511 if (gimple_debug_bind_p (stmt))
2512 {
2513 var = gimple_debug_bind_get_var (stmt);
2514 if (single_pred_p (e->dest))
2515 {
2516 value = gimple_debug_bind_get_value (stmt);
2517 value = unshare_expr (value);
2518 }
2519 else
2520 value = NULL_TREE;
2521 new_stmt = gimple_build_debug_bind (var, value, stmt);
2522 }
2523 else if (gimple_debug_source_bind_p (stmt))
2524 {
2525 var = gimple_debug_source_bind_get_var (stmt);
2526 value = gimple_debug_source_bind_get_value (stmt);
2527 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2528 }
2529 else if (gimple_debug_nonbind_marker_p (stmt))
2530 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2531 else
2532 gcc_unreachable ();
2533 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2534 id->debug_stmts.safe_push (new_stmt);
2535 gsi_prev (&ssi);
2536 }
2537 }
2538 }
2539
2540 /* Make a copy of the sub-loops of SRC_PARENT and place them
2541 as children of DEST_PARENT. */
2542
2543 static void
2544 copy_loops (copy_body_data *id,
2545 struct loop *dest_parent, struct loop *src_parent)
2546 {
2547 struct loop *src_loop = src_parent->inner;
2548 while (src_loop)
2549 {
2550 if (!id->blocks_to_copy
2551 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2552 {
2553 struct loop *dest_loop = alloc_loop ();
2554
2555 /* Assign the new loop its header and latch and associate
2556 those with the new loop. */
2557 dest_loop->header = (basic_block)src_loop->header->aux;
2558 dest_loop->header->loop_father = dest_loop;
2559 if (src_loop->latch != NULL)
2560 {
2561 dest_loop->latch = (basic_block)src_loop->latch->aux;
2562 dest_loop->latch->loop_father = dest_loop;
2563 }
2564
2565 /* Copy loop meta-data. */
2566 copy_loop_info (src_loop, dest_loop);
2567
2568 /* Finally place it into the loop array and the loop tree. */
2569 place_new_loop (cfun, dest_loop);
2570 flow_loop_tree_node_add (dest_parent, dest_loop);
2571
2572 dest_loop->safelen = src_loop->safelen;
2573 if (src_loop->unroll)
2574 {
2575 dest_loop->unroll = src_loop->unroll;
2576 cfun->has_unroll = true;
2577 }
2578 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2579 if (src_loop->force_vectorize)
2580 {
2581 dest_loop->force_vectorize = true;
2582 cfun->has_force_vectorize_loops = true;
2583 }
2584 if (src_loop->simduid)
2585 {
2586 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2587 cfun->has_simduid_loops = true;
2588 }
2589
2590 /* Recurse. */
2591 copy_loops (id, dest_loop, src_loop);
2592 }
2593 src_loop = src_loop->next;
2594 }
2595 }
2596
2597 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2598
2599 void
2600 redirect_all_calls (copy_body_data * id, basic_block bb)
2601 {
2602 gimple_stmt_iterator si;
2603 gimple *last = last_stmt (bb);
2604 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2605 {
2606 gimple *stmt = gsi_stmt (si);
2607 if (is_gimple_call (stmt))
2608 {
2609 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2610 if (edge)
2611 {
2612 edge->redirect_call_stmt_to_callee ();
2613 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2614 gimple_purge_dead_eh_edges (bb);
2615 }
2616 }
2617 }
2618 }
2619
2620 /* Make a copy of the body of FN so that it can be inserted inline in
2621 another function. Walks FN via CFG, returns new fndecl. */
2622
2623 static tree
2624 copy_cfg_body (copy_body_data * id,
2625 basic_block entry_block_map, basic_block exit_block_map,
2626 basic_block new_entry)
2627 {
2628 tree callee_fndecl = id->src_fn;
2629 /* Original cfun for the callee, doesn't change. */
2630 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2631 struct function *cfun_to_copy;
2632 basic_block bb;
2633 tree new_fndecl = NULL;
2634 bool need_debug_cleanup = false;
2635 int last;
2636 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2637 profile_count num = entry_block_map->count;
2638
2639 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2640
2641 /* Register specific tree functions. */
2642 gimple_register_cfg_hooks ();
2643
2644 /* If we are inlining just a region of the function, make sure to connect
2645 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2646 be part of a loop, we must compute the frequency and probability of
2647 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2648 probabilities of edges incoming from the nonduplicated region. */
2649 if (new_entry)
2650 {
2651 edge e;
2652 edge_iterator ei;
2653 den = profile_count::zero ();
2654
2655 FOR_EACH_EDGE (e, ei, new_entry->preds)
2656 if (!e->src->aux)
2657 den += e->count ();
2658 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2659 }
2660
2661 profile_count::adjust_for_ipa_scaling (&num, &den);
2662
2663 /* Must have a CFG here at this point. */
2664 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2665 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2666
2667
2668 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2669 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2670 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2671 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2672
2673 /* Duplicate any exception-handling regions. */
2674 if (cfun->eh)
2675 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2676 remap_decl_1, id);
2677
2678 /* Use aux pointers to link the original blocks to their copies. */
2679 FOR_EACH_BB_FN (bb, cfun_to_copy)
2680 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2681 {
2682 basic_block new_bb = copy_bb (id, bb, num, den);
2683 bb->aux = new_bb;
2684 new_bb->aux = bb;
2685 new_bb->loop_father = entry_block_map->loop_father;
2686 }
2687
2688 last = last_basic_block_for_fn (cfun);
2689
2690 /* Now that we've duplicated the blocks, duplicate their edges. */
2691 basic_block abnormal_goto_dest = NULL;
2692 if (id->call_stmt
2693 && stmt_can_make_abnormal_goto (id->call_stmt))
2694 {
2695 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2696
2697 bb = gimple_bb (id->call_stmt);
2698 gsi_next (&gsi);
2699 if (gsi_end_p (gsi))
2700 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2701 }
2702 FOR_ALL_BB_FN (bb, cfun_to_copy)
2703 if (!id->blocks_to_copy
2704 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2705 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2706 abnormal_goto_dest);
2707
2708 if (new_entry)
2709 {
2710 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2711 EDGE_FALLTHRU);
2712 e->probability = profile_probability::always ();
2713 }
2714
2715 /* Duplicate the loop tree, if available and wanted. */
2716 if (loops_for_fn (src_cfun) != NULL
2717 && current_loops != NULL)
2718 {
2719 copy_loops (id, entry_block_map->loop_father,
2720 get_loop (src_cfun, 0));
2721 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2722 loops_state_set (LOOPS_NEED_FIXUP);
2723 }
2724
2725 /* If the loop tree in the source function needed fixup, mark the
2726 destination loop tree for fixup, too. */
2727 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2728 loops_state_set (LOOPS_NEED_FIXUP);
2729
2730 if (gimple_in_ssa_p (cfun))
2731 FOR_ALL_BB_FN (bb, cfun_to_copy)
2732 if (!id->blocks_to_copy
2733 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2734 copy_phis_for_bb (bb, id);
2735
2736 FOR_ALL_BB_FN (bb, cfun_to_copy)
2737 if (bb->aux)
2738 {
2739 if (need_debug_cleanup
2740 && bb->index != ENTRY_BLOCK
2741 && bb->index != EXIT_BLOCK)
2742 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2743 /* Update call edge destinations. This cannot be done before loop
2744 info is updated, because we may split basic blocks. */
2745 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2746 && bb->index != ENTRY_BLOCK
2747 && bb->index != EXIT_BLOCK)
2748 redirect_all_calls (id, (basic_block)bb->aux);
2749 ((basic_block)bb->aux)->aux = NULL;
2750 bb->aux = NULL;
2751 }
2752
2753 /* Zero out AUX fields of blocks newly created during EH edge
2754 insertion. */
2755 for (; last < last_basic_block_for_fn (cfun); last++)
2756 {
2757 if (need_debug_cleanup)
2758 maybe_move_debug_stmts_to_successors (id,
2759 BASIC_BLOCK_FOR_FN (cfun, last));
2760 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2761 /* Update call edge destinations. This cannot be done before loop
2762 info is updated, because we may split basic blocks. */
2763 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2764 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2765 }
2766 entry_block_map->aux = NULL;
2767 exit_block_map->aux = NULL;
2768
2769 if (id->eh_map)
2770 {
2771 delete id->eh_map;
2772 id->eh_map = NULL;
2773 }
2774 if (id->dependence_map)
2775 {
2776 delete id->dependence_map;
2777 id->dependence_map = NULL;
2778 }
2779
2780 return new_fndecl;
2781 }
2782
2783 /* Copy the debug STMT using ID. We deal with these statements in a
2784 special way: if any variable in their VALUE expression wasn't
2785 remapped yet, we won't remap it, because that would get decl uids
2786 out of sync, causing codegen differences between -g and -g0. If
2787 this arises, we drop the VALUE expression altogether. */
2788
2789 static void
2790 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2791 {
2792 tree t, *n;
2793 struct walk_stmt_info wi;
2794
2795 if (gimple_block (stmt))
2796 {
2797 n = id->decl_map->get (gimple_block (stmt));
2798 gimple_set_block (stmt, n ? *n : id->block);
2799 }
2800
2801 if (gimple_debug_nonbind_marker_p (stmt))
2802 return;
2803
2804 /* Remap all the operands in STMT. */
2805 memset (&wi, 0, sizeof (wi));
2806 wi.info = id;
2807
2808 processing_debug_stmt = 1;
2809
2810 if (gimple_debug_source_bind_p (stmt))
2811 t = gimple_debug_source_bind_get_var (stmt);
2812 else if (gimple_debug_bind_p (stmt))
2813 t = gimple_debug_bind_get_var (stmt);
2814 else
2815 gcc_unreachable ();
2816
2817 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2818 && (n = id->debug_map->get (t)))
2819 {
2820 gcc_assert (VAR_P (*n));
2821 t = *n;
2822 }
2823 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2824 /* T is a non-localized variable. */;
2825 else
2826 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2827
2828 if (gimple_debug_bind_p (stmt))
2829 {
2830 gimple_debug_bind_set_var (stmt, t);
2831
2832 if (gimple_debug_bind_has_value_p (stmt))
2833 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2834 remap_gimple_op_r, &wi, NULL);
2835
2836 /* Punt if any decl couldn't be remapped. */
2837 if (processing_debug_stmt < 0)
2838 gimple_debug_bind_reset_value (stmt);
2839 }
2840 else if (gimple_debug_source_bind_p (stmt))
2841 {
2842 gimple_debug_source_bind_set_var (stmt, t);
2843 /* When inlining, if the source bind refers to one of the optimized
2844 away parameters, change the source bind into a normal debug bind
2845 referring to the corresponding DEBUG_EXPR_DECL that should have
2846 been bound before the call stmt. */
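/* Illustrative dump-level sketch: if the caller bound
     # DEBUG D#1 => y_3
   before the call, an inlined source bind
     # DEBUG p s=> p
   for the optimized-away PARM_DECL p becomes
     # DEBUG p => D#1
   (syntax as in -fdump-tree-* dumps; names hypothetical). */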
2847 t = gimple_debug_source_bind_get_value (stmt);
2848 if (t != NULL_TREE
2849 && TREE_CODE (t) == PARM_DECL
2850 && id->call_stmt)
2851 {
2852 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2853 unsigned int i;
2854 if (debug_args != NULL)
2855 {
2856 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2857 if ((**debug_args)[i] == DECL_ORIGIN (t)
2858 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2859 {
2860 t = (**debug_args)[i + 1];
2861 stmt->subcode = GIMPLE_DEBUG_BIND;
2862 gimple_debug_bind_set_value (stmt, t);
2863 break;
2864 }
2865 }
2866 }
2867 if (gimple_debug_source_bind_p (stmt))
2868 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2869 remap_gimple_op_r, &wi, NULL);
2870 }
2871
2872 processing_debug_stmt = 0;
2873
2874 update_stmt (stmt);
2875 }
2876
2877 /* Process deferred debug stmts. In order to give values better odds
2878 of being successfully remapped, we delay the processing of debug
2879 stmts until all other stmts that might require remapping are
2880 processed. */
2881
2882 static void
2883 copy_debug_stmts (copy_body_data *id)
2884 {
2885 size_t i;
2886 gdebug *stmt;
2887
2888 if (!id->debug_stmts.exists ())
2889 return;
2890
2891 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2892 copy_debug_stmt (stmt, id);
2893
2894 id->debug_stmts.release ();
2895 }
2896
2897 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2898 another function. */
2899
2900 static tree
2901 copy_tree_body (copy_body_data *id)
2902 {
2903 tree fndecl = id->src_fn;
2904 tree body = DECL_SAVED_TREE (fndecl);
2905
2906 walk_tree (&body, copy_tree_body_r, id, NULL);
2907
2908 return body;
2909 }
2910
2911 /* Make a copy of the body of FN so that it can be inserted inline in
2912 another function. */
2913
2914 static tree
2915 copy_body (copy_body_data *id,
2916 basic_block entry_block_map, basic_block exit_block_map,
2917 basic_block new_entry)
2918 {
2919 tree fndecl = id->src_fn;
2920 tree body;
2921
2922 /* If this body has a CFG, walk CFG and copy. */
2923 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2924 body = copy_cfg_body (id, entry_block_map, exit_block_map,
2925 new_entry);
2926 copy_debug_stmts (id);
2927
2928 return body;
2929 }
2930
2931 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2932 defined in function FN, or of a data member thereof. */
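/* E.g. when recursively inlining f into itself, an argument such as
   &local, where local is an auto variable of f, must not be
   propagated as a constant: each activation has its own local. */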
2933
2934 static bool
2935 self_inlining_addr_expr (tree value, tree fn)
2936 {
2937 tree var;
2938
2939 if (TREE_CODE (value) != ADDR_EXPR)
2940 return false;
2941
2942 var = get_base_address (TREE_OPERAND (value, 0));
2943
2944 return var && auto_var_in_fn_p (var, fn);
2945 }
2946
2947 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2948 lexical block and line number information from BASE_STMT, if given,
2949 or from the last stmt of the block otherwise. */
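/* Illustrative sketch: when inlining a call f (7) whose parameter p
   is otherwise optimized away, this emits something like
     # DEBUG p => 7
   (dump syntax) so the debugger can still display P. */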
2950
2951 static gimple *
2952 insert_init_debug_bind (copy_body_data *id,
2953 basic_block bb, tree var, tree value,
2954 gimple *base_stmt)
2955 {
2956 gimple *note;
2957 gimple_stmt_iterator gsi;
2958 tree tracked_var;
2959
2960 if (!gimple_in_ssa_p (id->src_cfun))
2961 return NULL;
2962
2963 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
2964 return NULL;
2965
2966 tracked_var = target_for_debug_bind (var);
2967 if (!tracked_var)
2968 return NULL;
2969
2970 if (bb)
2971 {
2972 gsi = gsi_last_bb (bb);
2973 if (!base_stmt && !gsi_end_p (gsi))
2974 base_stmt = gsi_stmt (gsi);
2975 }
2976
2977 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
2978
2979 if (bb)
2980 {
2981 if (!gsi_end_p (gsi))
2982 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2983 else
2984 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2985 }
2986
2987 return note;
2988 }
2989
2990 static void
2991 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
2992 {
2993 /* If VAR represents a zero-sized variable, it's possible that the
2994 assignment statement may result in no gimple statements. */
2995 if (init_stmt)
2996 {
2997 gimple_stmt_iterator si = gsi_last_bb (bb);
2998
2999 /* We can end up with init statements that store to a non-register
3000 from a rhs with a conversion. Handle that here by forcing the
3001 rhs into a temporary. gimple_regimplify_operands is not
3002 prepared to do this for us. */
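/* Illustrative GIMPLE sketch (tmp_2 is a hypothetical temporary):
     mem_var = (T) x_1;
   where mem_var is not a register becomes
     tmp_2 = (T) x_1;
     mem_var = tmp_2;  */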
3003 if (!is_gimple_debug (init_stmt)
3004 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3005 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3006 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3007 {
3008 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3009 gimple_expr_type (init_stmt),
3010 gimple_assign_rhs1 (init_stmt));
3011 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3012 GSI_NEW_STMT);
3013 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3014 gimple_assign_set_rhs1 (init_stmt, rhs);
3015 }
3016 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3017 gimple_regimplify_operands (init_stmt, &si);
3018
3019 if (!is_gimple_debug (init_stmt))
3020 {
3021 tree def = gimple_assign_lhs (init_stmt);
3022 insert_init_debug_bind (id, bb, def, def, init_stmt);
3023 }
3024 }
3025 }
3026
3027 /* Initialize parameter P with VALUE. If needed, produce an init
3028 statement at the end of BB. When BB is NULL, we return the init
3029 statement to be output later. */
3030 static gimple *
3031 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3032 basic_block bb, tree *vars)
3033 {
3034 gimple *init_stmt = NULL;
3035 tree var;
3036 tree rhs = value;
3037 tree def = (gimple_in_ssa_p (cfun)
3038 ? ssa_default_def (id->src_cfun, p) : NULL);
3039
3040 if (value
3041 && value != error_mark_node
3042 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3043 {
3044 /* If we can match up types by promotion/demotion do so. */
3045 if (fold_convertible_p (TREE_TYPE (p), value))
3046 rhs = fold_convert (TREE_TYPE (p), value);
3047 else
3048 {
3049 /* ??? For valid programs we should not end up here.
3050 Still, if we end up with truly mismatched types here, fall back
3051 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3052 GIMPLE to the following passes. */
3053 if (!is_gimple_reg_type (TREE_TYPE (value))
3054 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3055 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3056 else
3057 rhs = build_zero_cst (TREE_TYPE (p));
3058 }
3059 }
3060
3061 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3062 here since the type of this decl must be visible to the calling
3063 function. */
3064 var = copy_decl_to_var (p, id);
3065
3066 /* Declare this new variable. */
3067 DECL_CHAIN (var) = *vars;
3068 *vars = var;
3069
3070 /* Make gimplifier happy about this variable. */
3071 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3072
3073 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3074 we would not need to create a new variable here at all, if it
3075 weren't for debug info. Still, we can just use the argument
3076 value. */
3077 if (TREE_READONLY (p)
3078 && !TREE_ADDRESSABLE (p)
3079 && value && !TREE_SIDE_EFFECTS (value)
3080 && !def)
3081 {
3082 /* We may produce non-gimple trees by adding NOPs or introduce
3083 invalid sharing when the operand is not really constant.
3084 It is not a big deal to prohibit constant propagation here as
3085 we will constant propagate in the DOM1 pass anyway. */
3086 if (is_gimple_min_invariant (value)
3087 && useless_type_conversion_p (TREE_TYPE (p),
3088 TREE_TYPE (value))
3089 /* We have to be very careful about ADDR_EXPR. Make sure
3090 the base variable isn't a local variable of the inlined
3091 function, e.g., when doing recursive inlining, direct or
3092 mutually-recursive or whatever, which is why we don't
3093 just test whether fn == current_function_decl. */
3094 && ! self_inlining_addr_expr (value, fn))
3095 {
3096 insert_decl_map (id, p, value);
3097 insert_debug_decl_map (id, p, var);
3098 return insert_init_debug_bind (id, bb, var, value, NULL);
3099 }
3100 }
3101
3102 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3103 that way, when the PARM_DECL is encountered, it will be
3104 automatically replaced by the VAR_DECL. */
3105 insert_decl_map (id, p, var);
3106
3107 /* Even if P was TREE_READONLY, the new VAR should not be.
3108 In the original code, we would have constructed a
3109 temporary, and then the function body would have never
3110 changed the value of P. However, now, we will be
3111 constructing VAR directly. The constructor body may
3112 change its value multiple times as it is being
3113 constructed. Therefore, it must not be TREE_READONLY;
3114 the back-end assumes that a TREE_READONLY variable is
3115 assigned to only once. */
3116 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3117 TREE_READONLY (var) = 0;
3118
3119 /* If there is no setup required and we are in SSA, take the easy route
3120 replacing all SSA names representing the function parameter by the
3121 SSA name passed to the function.
3122
3123 We need to construct a map for the variable anyway, as it might be
3124 used in different SSA names when the parameter is set in the function.
3125
3126 Do the replacement at -O0 for const arguments replaced by a constant.
3127 This is important for builtin_constant_p and other constructs requiring
3128 a constant argument to be visible in the inlined function body. */
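/* A minimal sketch, with hypothetical names: inlining
     static inline int inc (int x) { return x + 1; }
   at a call inc (y_2) simply maps the default definition x_1(D) of X
   to y_2, so no initialization statement is emitted at all. */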
3129 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3130 && (optimize
3131 || (TREE_READONLY (p)
3132 && is_gimple_min_invariant (rhs)))
3133 && (TREE_CODE (rhs) == SSA_NAME
3134 || is_gimple_min_invariant (rhs))
3135 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3136 {
3137 insert_decl_map (id, def, rhs);
3138 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3139 }
3140
3141 /* If the value of the argument is never used, don't bother initializing
3142 it. */
3143 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3144 {
3145 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3146 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3147 }
3148
3149 /* Initialize this VAR_DECL from the equivalent argument. Convert
3150 the argument to the proper type in case it was promoted. */
3151 if (value)
3152 {
3153 if (rhs == error_mark_node)
3154 {
3155 insert_decl_map (id, p, var);
3156 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3157 }
3158
3159 STRIP_USELESS_TYPE_CONVERSION (rhs);
3160
3161 /* If we are in SSA form, properly remap the default definition
3162 or assign to a dummy SSA name if the parameter is unused and
3163 we are not optimizing. */
3164 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3165 {
3166 if (def)
3167 {
3168 def = remap_ssa_name (def, id);
3169 init_stmt = gimple_build_assign (def, rhs);
3170 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3171 set_ssa_default_def (cfun, var, NULL);
3172 }
3173 else if (!optimize)
3174 {
3175 def = make_ssa_name (var);
3176 init_stmt = gimple_build_assign (def, rhs);
3177 }
3178 }
3179 else
3180 init_stmt = gimple_build_assign (var, rhs);
3181
3182 if (bb && init_stmt)
3183 insert_init_stmt (id, bb, init_stmt);
3184 }
3185 return init_stmt;
3186 }
3187
3188 /* Generate code to initialize the parameters of the function at the
3189 top of the stack in ID from the GIMPLE_CALL STMT. */
3190
3191 static void
3192 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3193 tree fn, basic_block bb)
3194 {
3195 tree parms;
3196 size_t i;
3197 tree p;
3198 tree vars = NULL_TREE;
3199 tree static_chain = gimple_call_chain (stmt);
3200
3201 /* Figure out what the parameters are. */
3202 parms = DECL_ARGUMENTS (fn);
3203
3204 /* Loop through the parameter declarations, replacing each with an
3205 equivalent VAR_DECL, appropriately initialized. */
3206 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3207 {
3208 tree val;
3209 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3210 setup_one_parameter (id, p, val, fn, bb, &vars);
3211 }
3212 /* After remapping the parameters, remap their types. This has to be done
3213 in a second loop over all parameters to appropriately remap
3214 variable sized arrays when the size is specified in a
3215 parameter following the array. */
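/* E.g. with the GNU parameter forward-declaration extension prototype
     void f (int n; char a[n][n], int n)
   the type of A refers to N, so A's replacement can only receive its
   final type once N itself has been remapped (illustrative prototype). */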
3216 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3217 {
3218 tree *varp = id->decl_map->get (p);
3219 if (varp && VAR_P (*varp))
3220 {
3221 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3222 ? ssa_default_def (id->src_cfun, p) : NULL);
3223 tree var = *varp;
3224 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3225 /* Also remap the default definition if it was remapped
3226 to the default definition of the parameter replacement
3227 by the parameter setup. */
3228 if (def)
3229 {
3230 tree *defp = id->decl_map->get (def);
3231 if (defp
3232 && TREE_CODE (*defp) == SSA_NAME
3233 && SSA_NAME_VAR (*defp) == var)
3234 TREE_TYPE (*defp) = TREE_TYPE (var);
3235 }
3236 }
3237 }
3238
3239 /* Initialize the static chain. */
3240 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3241 gcc_assert (fn != current_function_decl);
3242 if (p)
3243 {
3244 /* No static chain? Seems like a bug in tree-nested.c. */
3245 gcc_assert (static_chain);
3246
3247 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3248 }
3249
3250 declare_inline_vars (id->block, vars);
3251 }
3252
3253
3254 /* Declare a return variable to replace the RESULT_DECL for the
3255 function we are calling. An appropriate DECL_STMT is returned.
3256 The USE_STMT is filled to contain a use of the declaration to
3257 indicate the return value of the function.
3258
3259 RETURN_SLOT, if non-null, is the place in which to store the result. It
3260 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3261 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3262
3263 The return value is a (possibly null) value that holds the result
3264 as seen by the caller. */
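/* Illustrative sketch: for "d = foo ();" MODIFY_DEST is d, and when
   that is safe d itself becomes the return variable; for an
   aggregate return compiled with the return slot optimization,
   RETURN_SLOT is the caller-provided object and the callee's stores
   through its RESULT_DECL are redirected into it. */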
3265
3266 static tree
3267 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3268 basic_block entry_bb)
3269 {
3270 tree callee = id->src_fn;
3271 tree result = DECL_RESULT (callee);
3272 tree callee_type = TREE_TYPE (result);
3273 tree caller_type;
3274 tree var, use;
3275
3276 /* Handle type-mismatches in the function declaration return type
3277 vs. the call expression. */
3278 if (modify_dest)
3279 caller_type = TREE_TYPE (modify_dest);
3280 else
3281 caller_type = TREE_TYPE (TREE_TYPE (callee));
3282
3283 /* We don't need to do anything for functions that don't return anything. */
3284 if (VOID_TYPE_P (callee_type))
3285 return NULL_TREE;
3286
3287 /* If there was a return slot, then the return value is the
3288 dereferenced address of that object. */
3289 if (return_slot)
3290 {
3291 /* The front end shouldn't have used both return_slot and
3292 a modify expression. */
3293 gcc_assert (!modify_dest);
3294 if (DECL_BY_REFERENCE (result))
3295 {
3296 tree return_slot_addr = build_fold_addr_expr (return_slot);
3297 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3298
3299 /* We are going to construct *&return_slot and we can't do that
3300 for variables believed to be not addressable.
3301
3302 FIXME: This check can possibly trigger, because values returned
3303 via the return slot optimization are not believed to have their
3304 address taken by alias analysis. */
3305 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3306 var = return_slot_addr;
3307 }
3308 else
3309 {
3310 var = return_slot;
3311 gcc_assert (TREE_CODE (var) != SSA_NAME);
3312 if (TREE_ADDRESSABLE (result))
3313 mark_addressable (var);
3314 }
3315 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3316 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3317 && !DECL_GIMPLE_REG_P (result)
3318 && DECL_P (var))
3319 DECL_GIMPLE_REG_P (var) = 0;
3320 use = NULL;
3321 goto done;
3322 }
3323
3324 /* All types requiring non-trivial constructors should have been handled. */
3325 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3326
3327 /* Attempt to avoid creating a new temporary variable. */
3328 if (modify_dest
3329 && TREE_CODE (modify_dest) != SSA_NAME)
3330 {
3331 bool use_it = false;
3332
3333 /* We can't use MODIFY_DEST if there's type promotion involved. */
3334 if (!useless_type_conversion_p (callee_type, caller_type))
3335 use_it = false;
3336
3337 /* ??? If we're assigning to a variable sized type, then we must
3338 reuse the destination variable, because we've no good way to
3339 create variable sized temporaries at this point. */
3340 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3341 use_it = true;
3342
3343 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3344 reuse it as the result of the call directly. Don't do this if
3345 it would promote MODIFY_DEST to addressable. */
3346 else if (TREE_ADDRESSABLE (result))
3347 use_it = false;
3348 else
3349 {
3350 tree base_m = get_base_address (modify_dest);
3351
3352 /* If the base isn't a decl, then it's a pointer, and we don't
3353 know where that's going to go. */
3354 if (!DECL_P (base_m))
3355 use_it = false;
3356 else if (is_global_var (base_m))
3357 use_it = false;
3358 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3359 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3360 && !DECL_GIMPLE_REG_P (result)
3361 && DECL_GIMPLE_REG_P (base_m))
3362 use_it = false;
3363 else if (!TREE_ADDRESSABLE (base_m))
3364 use_it = true;
3365 }
3366
3367 if (use_it)
3368 {
3369 var = modify_dest;
3370 use = NULL;
3371 goto done;
3372 }
3373 }
3374
3375 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3376
3377 var = copy_result_decl_to_var (result, id);
3378 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3379
3380 /* Do not have the rest of GCC warn about this variable as it should
3381 not be visible to the user. */
3382 TREE_NO_WARNING (var) = 1;
3383
3384 declare_inline_vars (id->block, var);
3385
3386 /* Build the use expr. If the return type of the function was
3387 promoted, convert it back to the expected type. */
3388 use = var;
3389 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3390 {
3391 /* If we can match up types by promotion/demotion do so. */
3392 if (fold_convertible_p (caller_type, var))
3393 use = fold_convert (caller_type, var);
3394 else
3395 {
3396 /* ??? For valid programs we should not end up here.
3397 Still, if we end up with truly mismatched types here, fall back
3398 to using a MEM_REF to not leak invalid GIMPLE to the following
3399 passes. */
3400 /* Prevent var from being written into SSA form. */
3401 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3402 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3403 DECL_GIMPLE_REG_P (var) = false;
3404 else if (is_gimple_reg_type (TREE_TYPE (var)))
3405 TREE_ADDRESSABLE (var) = true;
3406 use = fold_build2 (MEM_REF, caller_type,
3407 build_fold_addr_expr (var),
3408 build_int_cst (ptr_type_node, 0));
3409 }
3410 }
3411
3412 STRIP_USELESS_TYPE_CONVERSION (use);
3413
3414 if (DECL_BY_REFERENCE (result))
3415 {
3416 TREE_ADDRESSABLE (var) = 1;
3417 var = build_fold_addr_expr (var);
3418 }
3419
3420 done:
3421 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3422 way, when the RESULT_DECL is encountered, it will be
3423 automatically replaced by the VAR_DECL.
3424
3425 When returning by reference, ensure that RESULT_DECL remaps to
3426 gimple_val. */
3427 if (DECL_BY_REFERENCE (result)
3428 && !is_gimple_val (var))
3429 {
3430 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3431 insert_decl_map (id, result, temp);
3432 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3433 its default_def SSA_NAME. */
3434 if (gimple_in_ssa_p (id->src_cfun)
3435 && is_gimple_reg (result))
3436 {
3437 temp = make_ssa_name (temp);
3438 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3439 }
3440 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3441 }
3442 else
3443 insert_decl_map (id, result, var);
3444
3445 /* Remember this so we can ignore it in remap_decls. */
3446 id->retvar = var;
3447 return use;
3448 }
3449
3450 /* Determine if the function can be copied. If so, return NULL. If
3451 not, return a string describing the reason for failure. */
3452
3453 const char *
3454 copy_forbidden (struct function *fun)
3455 {
3456 const char *reason = fun->cannot_be_copied_reason;
3457
3458 /* Only examine the function once. */
3459 if (fun->cannot_be_copied_set)
3460 return reason;
3461
3462 /* We cannot copy a function that receives a non-local goto
3463 because we cannot remap the destination label used in the
3464 function that is performing the non-local goto. */
3465 /* ??? Actually, this should be possible, if we work at it.
3466 No doubt there's just a handful of places that simply
3467 assume it doesn't happen and don't substitute properly. */
3468 if (fun->has_nonlocal_label)
3469 {
3470 reason = G_("function %q+F can never be copied "
3471 "because it receives a non-local goto");
3472 goto fail;
3473 }
3474
3475 if (fun->has_forced_label_in_static)
3476 {
3477 reason = G_("function %q+F can never be copied because it saves "
3478 "address of local label in a static variable");
3479 goto fail;
3480 }
3481
3482 fail:
3483 fun->cannot_be_copied_reason = reason;
3484 fun->cannot_be_copied_set = true;
3485 return reason;
3486 }
3487
3488
3489 static const char *inline_forbidden_reason;
3490
3491 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3492 iff a function cannot be inlined. Also sets the reason why. */
3493
3494 static tree
3495 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3496 struct walk_stmt_info *wip)
3497 {
3498 tree fn = (tree) wip->info;
3499 tree t;
3500 gimple *stmt = gsi_stmt (*gsi);
3501
3502 switch (gimple_code (stmt))
3503 {
3504 case GIMPLE_CALL:
3505 /* Refuse to inline an alloca call unless the user explicitly forced
3506 it, as this may change the program's memory overhead drastically
3507 when the function using alloca is called in a loop. In the GCC
3508 present in SPEC2000, inlining into schedule_block caused it to
3509 require 2GB of RAM instead of 256MB. Don't do so for alloca calls
3510 emitted for VLA objects, as those can't cause unbounded growth
3511 (they're always wrapped inside stack_save/stack_restore regions). */
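/* A sketch of the distinction: a user-level "p = alloca (n);" in a
   loop keeps growing the frame on every iteration, whereas a VLA such
   as "int a[n];" is gimplified to an alloca call with the
   alloca-for-var flag set and is bracketed by stack save/restore, so
   it stays inlinable.  */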
3512 if (gimple_maybe_alloca_call_p (stmt)
3513 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3514 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3515 {
3516 inline_forbidden_reason
3517 = G_("function %q+F can never be inlined because it uses "
3518 "alloca (override using the always_inline attribute)");
3519 *handled_ops_p = true;
3520 return fn;
3521 }
3522
3523 t = gimple_call_fndecl (stmt);
3524 if (t == NULL_TREE)
3525 break;
3526
3527 /* We cannot inline functions that call setjmp. */
3528 if (setjmp_call_p (t))
3529 {
3530 inline_forbidden_reason
3531 = G_("function %q+F can never be inlined because it uses setjmp");
3532 *handled_ops_p = true;
3533 return t;
3534 }
3535
3536 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3537 switch (DECL_FUNCTION_CODE (t))
3538 {
3539 /* We cannot inline functions that take a variable number of
3540 arguments. */
3541 case BUILT_IN_VA_START:
3542 case BUILT_IN_NEXT_ARG:
3543 case BUILT_IN_VA_END:
3544 inline_forbidden_reason
3545 = G_("function %q+F can never be inlined because it "
3546 "uses variable argument lists");
3547 *handled_ops_p = true;
3548 return t;
3549
3550 case BUILT_IN_LONGJMP:
3551 /* We can't inline functions that call __builtin_longjmp at
3552 all. The non-local goto machinery really requires the
3553 destination be in a different function. If we allow the
3554 function calling __builtin_longjmp to be inlined into the
3555 function calling __builtin_setjmp, Things will Go Awry. */
3556 inline_forbidden_reason
3557 = G_("function %q+F can never be inlined because "
3558 "it uses setjmp-longjmp exception handling");
3559 *handled_ops_p = true;
3560 return t;
3561
3562 case BUILT_IN_NONLOCAL_GOTO:
3563 /* Similarly. */
3564 inline_forbidden_reason
3565 = G_("function %q+F can never be inlined because "
3566 "it uses non-local goto");
3567 *handled_ops_p = true;
3568 return t;
3569
3570 case BUILT_IN_RETURN:
3571 case BUILT_IN_APPLY_ARGS:
3572 /* If a __builtin_apply_args caller would be inlined,
3573 it would be saving arguments of the function it has
3574 been inlined into. Similarly __builtin_return would
3575 return from the function the inline has been inlined into. */
3576 inline_forbidden_reason
3577 = G_("function %q+F can never be inlined because "
3578 "it uses __builtin_return or __builtin_apply_args");
3579 *handled_ops_p = true;
3580 return t;
3581
3582 default:
3583 break;
3584 }
3585 break;
3586
3587 case GIMPLE_GOTO:
3588 t = gimple_goto_dest (stmt);
3589
3590 /* We will not inline a function which uses computed goto. The
3591 addresses of its local labels, which may be tucked into
3592 global storage, are of course not constant across
3593 instantiations, which causes unexpected behavior. */
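/* For instance (GNU C label-as-value):
     static void *lab = &&l1;  ...  goto *lab;
   The stored address refers to a label in the original body, not in
   the inlined copy, so the jump would land in the wrong
   instantiation.  */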
3594 if (TREE_CODE (t) != LABEL_DECL)
3595 {
3596 inline_forbidden_reason
3597 = G_("function %q+F can never be inlined "
3598 "because it contains a computed goto");
3599 *handled_ops_p = true;
3600 return t;
3601 }
3602 break;
3603
3604 default:
3605 break;
3606 }
3607
3608 *handled_ops_p = false;
3609 return NULL_TREE;
3610 }
3611
3612 /* Return true if FNDECL is a function that cannot be inlined into
3613 another one. */
3614
3615 static bool
3616 inline_forbidden_p (tree fndecl)
3617 {
3618 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3619 struct walk_stmt_info wi;
3620 basic_block bb;
3621 bool forbidden_p = false;
3622
3623 /* First check for shared reasons not to copy the code. */
3624 inline_forbidden_reason = copy_forbidden (fun);
3625 if (inline_forbidden_reason != NULL)
3626 return true;
3627
3628 /* Next, walk the statements of the function looking for
3629 constructs we can't handle, or that are non-optimal for inlining. */
3630 hash_set<tree> visited_nodes;
3631 memset (&wi, 0, sizeof (wi));
3632 wi.info = (void *) fndecl;
3633 wi.pset = &visited_nodes;
3634
3635 FOR_EACH_BB_FN (bb, fun)
3636 {
3637 gimple *ret;
3638 gimple_seq seq = bb_seq (bb);
3639 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3640 forbidden_p = (ret != NULL);
3641 if (forbidden_p)
3642 break;
3643 }
3644
3645 return forbidden_p;
3646 }
3647 \f
3648 /* Return false if the function FNDECL cannot be inlined on account of its
3649 attributes, true otherwise. */
3650 static bool
3651 function_attribute_inlinable_p (const_tree fndecl)
3652 {
3653 if (targetm.attribute_table)
3654 {
3655 const_tree a;
3656
3657 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3658 {
3659 const_tree name = TREE_PURPOSE (a);
3660 int i;
3661
3662 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3663 if (is_attribute_p (targetm.attribute_table[i].name, name))
3664 return targetm.function_attribute_inlinable_p (fndecl);
3665 }
3666 }
3667
3668 return true;
3669 }
3670
3671 /* Returns nonzero if FN is a function that does not have any
3672 fundamental inline-blocking properties. */
3673
3674 bool
3675 tree_inlinable_function_p (tree fn)
3676 {
3677 bool inlinable = true;
3678 bool do_warning;
3679 tree always_inline;
3680
3681 /* If we've already decided this function shouldn't be inlined,
3682 there's no need to check again. */
3683 if (DECL_UNINLINABLE (fn))
3684 return false;
3685
3686 /* We only warn for functions declared `inline' by the user. */
3687 do_warning = (warn_inline
3688 && DECL_DECLARED_INLINE_P (fn)
3689 && !DECL_NO_INLINE_WARNING_P (fn)
3690 && !DECL_IN_SYSTEM_HEADER (fn));
3691
3692 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3693
3694 if (flag_no_inline
3695 && always_inline == NULL)
3696 {
3697 if (do_warning)
3698 warning (OPT_Winline, "function %q+F can never be inlined because it "
3699 "is suppressed using -fno-inline", fn);
3700 inlinable = false;
3701 }
3702
3703 else if (!function_attribute_inlinable_p (fn))
3704 {
3705 if (do_warning)
3706 warning (OPT_Winline, "function %q+F can never be inlined because it "
3707 "uses attributes conflicting with inlining", fn);
3708 inlinable = false;
3709 }
3710
3711 else if (inline_forbidden_p (fn))
3712 {
3713 /* See if we should warn about uninlinable functions. Previously,
3714 some of these warnings would be issued while trying to expand
3715 the function inline, but that would cause multiple warnings
3716 about functions that would for example call alloca. But since
3717 this is a property of the function, just one warning is enough.
3718 As a bonus we can now give more details about the reason why a
3719 function is not inlinable. */
3720 if (always_inline)
3721 error (inline_forbidden_reason, fn);
3722 else if (do_warning)
3723 warning (OPT_Winline, inline_forbidden_reason, fn);
3724
3725 inlinable = false;
3726 }
3727
3728 /* Squirrel away the result so that we don't have to check again. */
3729 DECL_UNINLINABLE (fn) = !inlinable;
3730
3731 return inlinable;
3732 }
3733
3734 /* Estimate the cost of a memory move of type TYPE. Use the
3735 machine-dependent word size, take a possible memcpy call into account,
3736 and return the cost based on optimizing for size or speed per SPEED_P. */
3737
3738 int
3739 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3740 {
3741 HOST_WIDE_INT size;
3742
3743 gcc_assert (!VOID_TYPE_P (type));
3744
3745 if (TREE_CODE (type) == VECTOR_TYPE)
3746 {
3747 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3748 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3749 int orig_mode_size
3750 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3751 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3752 return ((orig_mode_size + simd_mode_size - 1)
3753 / simd_mode_size);
3754 }
3755
3756 size = int_size_in_bytes (type);
3757
3758 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3759 /* Cost of a memcpy call, 3 arguments and the call. */
3760 return 4;
3761 else
3762 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3763 }
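/* A worked example with illustrative target values (not taken from
   this file): with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 8, a
   16-byte struct costs (16 + 8 - 1) / 8 == 2 move units, while a
   128-byte struct exceeds 8 * 8 == 64 bytes and costs a flat 4,
   modelling a memcpy call plus its three arguments.  */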
3764
3765 /* Returns the cost of operation CODE, according to WEIGHTS. */
3766
3767 static int
3768 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3769 tree op1 ATTRIBUTE_UNUSED, tree op2)
3770 {
3771 switch (code)
3772 {
3773 /* These are "free" conversions, or their presumed cost
3774 is folded into other operations. */
3775 case RANGE_EXPR:
3776 CASE_CONVERT:
3777 case COMPLEX_EXPR:
3778 case PAREN_EXPR:
3779 case VIEW_CONVERT_EXPR:
3780 return 0;
3781
3782 /* Assign cost of 1 to usual operations.
3783 ??? We may consider mapping RTL costs to this. */
3784 case COND_EXPR:
3785 case VEC_COND_EXPR:
3786 case VEC_PERM_EXPR:
3787
3788 case PLUS_EXPR:
3789 case POINTER_PLUS_EXPR:
3790 case POINTER_DIFF_EXPR:
3791 case MINUS_EXPR:
3792 case MULT_EXPR:
3793 case MULT_HIGHPART_EXPR:
3794
3795 case ADDR_SPACE_CONVERT_EXPR:
3796 case FIXED_CONVERT_EXPR:
3797 case FIX_TRUNC_EXPR:
3798
3799 case NEGATE_EXPR:
3800 case FLOAT_EXPR:
3801 case MIN_EXPR:
3802 case MAX_EXPR:
3803 case ABS_EXPR:
3804
3805 case LSHIFT_EXPR:
3806 case RSHIFT_EXPR:
3807 case LROTATE_EXPR:
3808 case RROTATE_EXPR:
3809
3810 case BIT_IOR_EXPR:
3811 case BIT_XOR_EXPR:
3812 case BIT_AND_EXPR:
3813 case BIT_NOT_EXPR:
3814
3815 case TRUTH_ANDIF_EXPR:
3816 case TRUTH_ORIF_EXPR:
3817 case TRUTH_AND_EXPR:
3818 case TRUTH_OR_EXPR:
3819 case TRUTH_XOR_EXPR:
3820 case TRUTH_NOT_EXPR:
3821
3822 case LT_EXPR:
3823 case LE_EXPR:
3824 case GT_EXPR:
3825 case GE_EXPR:
3826 case EQ_EXPR:
3827 case NE_EXPR:
3828 case ORDERED_EXPR:
3829 case UNORDERED_EXPR:
3830
3831 case UNLT_EXPR:
3832 case UNLE_EXPR:
3833 case UNGT_EXPR:
3834 case UNGE_EXPR:
3835 case UNEQ_EXPR:
3836 case LTGT_EXPR:
3837
3838 case CONJ_EXPR:
3839
3840 case PREDECREMENT_EXPR:
3841 case PREINCREMENT_EXPR:
3842 case POSTDECREMENT_EXPR:
3843 case POSTINCREMENT_EXPR:
3844
3845 case REALIGN_LOAD_EXPR:
3846
3847 case WIDEN_SUM_EXPR:
3848 case WIDEN_MULT_EXPR:
3849 case DOT_PROD_EXPR:
3850 case SAD_EXPR:
3851 case WIDEN_MULT_PLUS_EXPR:
3852 case WIDEN_MULT_MINUS_EXPR:
3853 case WIDEN_LSHIFT_EXPR:
3854
3855 case VEC_WIDEN_MULT_HI_EXPR:
3856 case VEC_WIDEN_MULT_LO_EXPR:
3857 case VEC_WIDEN_MULT_EVEN_EXPR:
3858 case VEC_WIDEN_MULT_ODD_EXPR:
3859 case VEC_UNPACK_HI_EXPR:
3860 case VEC_UNPACK_LO_EXPR:
3861 case VEC_UNPACK_FLOAT_HI_EXPR:
3862 case VEC_UNPACK_FLOAT_LO_EXPR:
3863 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3864 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3865 case VEC_PACK_TRUNC_EXPR:
3866 case VEC_PACK_SAT_EXPR:
3867 case VEC_PACK_FIX_TRUNC_EXPR:
3868 case VEC_PACK_FLOAT_EXPR:
3869 case VEC_WIDEN_LSHIFT_HI_EXPR:
3870 case VEC_WIDEN_LSHIFT_LO_EXPR:
3871 case VEC_DUPLICATE_EXPR:
3872 case VEC_SERIES_EXPR:
3873
3874 return 1;
3875
3876 /* A few special cases of expensive operations. This is useful
3877 to avoid inlining functions that contain too many of these. */
3878 case TRUNC_DIV_EXPR:
3879 case CEIL_DIV_EXPR:
3880 case FLOOR_DIV_EXPR:
3881 case ROUND_DIV_EXPR:
3882 case EXACT_DIV_EXPR:
3883 case TRUNC_MOD_EXPR:
3884 case CEIL_MOD_EXPR:
3885 case FLOOR_MOD_EXPR:
3886 case ROUND_MOD_EXPR:
3887 case RDIV_EXPR:
3888 if (TREE_CODE (op2) != INTEGER_CST)
3889 return weights->div_mod_cost;
3890 return 1;
3891
3892 /* Bit-field insertion needs several shift and mask operations. */
3893 case BIT_INSERT_EXPR:
3894 return 3;
3895
3896 default:
3897 /* We expect a copy assignment with no operator. */
3898 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3899 return 0;
3900 }
3901 }
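/* For example, under the default weights set up in init_inline_once
   below, "x / y" with a non-constant divisor is charged div_mod_cost
   (10 in the time weights), while "x / 4" is charged 1, since
   division by a constant is typically strength-reduced to shifts and
   multiplies.  */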
3902
3903
3904 /* Estimate number of instructions that will be created by expanding
3905 the statements in the statement sequence STMTS.
3906 WEIGHTS contains weights attributed to various constructs. */
3907
3908 int
3909 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3910 {
3911 int cost;
3912 gimple_stmt_iterator gsi;
3913
3914 cost = 0;
3915 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3916 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3917
3918 return cost;
3919 }
3920
3921
3922 /* Estimate number of instructions that will be created by expanding STMT.
3923 WEIGHTS contains weights attributed to various constructs. */
3924
3925 int
3926 estimate_num_insns (gimple *stmt, eni_weights *weights)
3927 {
3928 unsigned cost, i;
3929 enum gimple_code code = gimple_code (stmt);
3930 tree lhs;
3931 tree rhs;
3932
3933 switch (code)
3934 {
3935 case GIMPLE_ASSIGN:
3936 /* Try to estimate the cost of assignments. We have two cases to
3937 deal with:
3938 1) Simple assignments to registers;
3939 2) Stores to things that must live in memory. This includes
3940 "normal" stores to scalars, but also assignments of large
3941 structures, or constructors of big arrays;
3942
3943 Let us look at these two cases, assuming we have "a = b + C":
3944 <GIMPLE_ASSIGN <var_decl "a">
3945 <plus_expr <var_decl "b"> <constant C>>
3946 If "a" is a GIMPLE register, the assignment to it is free on almost
3947 any target, because "a" usually ends up in a real register. Hence
3948 the only cost of this expression comes from the PLUS_EXPR, and we
3949 can ignore the GIMPLE_ASSIGN.
3950 If "a" is not a GIMPLE register, the assignment to "a" will most
3951 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3952 of moving something into "a", which we compute using the function
3953 estimate_move_cost. */
3954 if (gimple_clobber_p (stmt))
3955 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3956
3957 lhs = gimple_assign_lhs (stmt);
3958 rhs = gimple_assign_rhs1 (stmt);
3959
3960 cost = 0;
3961
3962 /* Account for the cost of moving to / from memory. */
3963 if (gimple_store_p (stmt))
3964 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
3965 if (gimple_assign_load_p (stmt))
3966 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
3967
3968 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3969 gimple_assign_rhs1 (stmt),
3970 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3971 == GIMPLE_BINARY_RHS
3972 ? gimple_assign_rhs2 (stmt) : NULL);
3973 break;
3974
3975 case GIMPLE_COND:
3976 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3977 gimple_op (stmt, 0),
3978 gimple_op (stmt, 1));
3979 break;
3980
3981 case GIMPLE_SWITCH:
3982 {
3983 gswitch *switch_stmt = as_a <gswitch *> (stmt);
3984 /* Take into account cost of the switch + guess 2 conditional jumps for
3985 each case label.
3986
3987 TODO: once the switch expansion logic is sufficiently separated, we can
3988 do a better job of estimating the cost of the switch. */
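/* E.g. a switch with 16 labels is estimated at floor_log2 (16) * 2
   == 8 insns when optimizing for speed (roughly a balanced decision
   tree), but at 16 * 2 == 32 insns when optimizing for size.  */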
3989 if (weights->time_based)
3990 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
3991 else
3992 cost = gimple_switch_num_labels (switch_stmt) * 2;
3993 }
3994 break;
3995
3996 case GIMPLE_CALL:
3997 {
3998 tree decl;
3999
4000 if (gimple_call_internal_p (stmt))
4001 return 0;
4002 else if ((decl = gimple_call_fndecl (stmt))
4003 && DECL_BUILT_IN (decl))
4004 {
4005 /* Do not special case builtins where we see the body.
4006 This just confuses the inliner. */
4007 struct cgraph_node *node;
4008 if (!(node = cgraph_node::get (decl))
4009 || node->definition)
4010 ;
4011 /* For builtins that are likely expanded to nothing or
4012 inlined, do not account operand costs. */
4013 else if (is_simple_builtin (decl))
4014 return 0;
4015 else if (is_inexpensive_builtin (decl))
4016 return weights->target_builtin_call_cost;
4017 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4018 {
4019 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4020 specialize the cheap expansion we do here.
4021 ??? This asks for a more general solution. */
4022 switch (DECL_FUNCTION_CODE (decl))
4023 {
4024 case BUILT_IN_POW:
4025 case BUILT_IN_POWF:
4026 case BUILT_IN_POWL:
4027 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4028 && (real_equal
4029 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4030 &dconst2)))
4031 return estimate_operator_cost
4032 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4033 gimple_call_arg (stmt, 0));
4034 break;
4035
4036 default:
4037 break;
4038 }
4039 }
4040 }
4041
4042 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4043 if (gimple_call_lhs (stmt))
4044 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4045 weights->time_based);
4046 for (i = 0; i < gimple_call_num_args (stmt); i++)
4047 {
4048 tree arg = gimple_call_arg (stmt, i);
4049 cost += estimate_move_cost (TREE_TYPE (arg),
4050 weights->time_based);
4051 }
4052 break;
4053 }
4054
4055 case GIMPLE_RETURN:
4056 return weights->return_cost;
4057
4058 case GIMPLE_GOTO:
4059 case GIMPLE_LABEL:
4060 case GIMPLE_NOP:
4061 case GIMPLE_PHI:
4062 case GIMPLE_PREDICT:
4063 case GIMPLE_DEBUG:
4064 return 0;
4065
4066 case GIMPLE_ASM:
4067 {
4068 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4069 /* 1000 means infinity. This avoids overflows later
4070 with very long asm statements. */
4071 if (count > 1000)
4072 count = 1000;
4073 return MAX (1, count);
4074 }
4075
4076 case GIMPLE_RESX:
4077 /* This is either going to be an external function call with one
4078 argument, or two register copy statements plus a goto. */
4079 return 2;
4080
4081 case GIMPLE_EH_DISPATCH:
4082 /* ??? This is going to turn into a switch statement. Ideally
4083 we'd have a look at the eh region and estimate the number of
4084 edges involved. */
4085 return 10;
4086
4087 case GIMPLE_BIND:
4088 return estimate_num_insns_seq (
4089 gimple_bind_body (as_a <gbind *> (stmt)),
4090 weights);
4091
4092 case GIMPLE_EH_FILTER:
4093 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4094
4095 case GIMPLE_CATCH:
4096 return estimate_num_insns_seq (gimple_catch_handler (
4097 as_a <gcatch *> (stmt)),
4098 weights);
4099
4100 case GIMPLE_TRY:
4101 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4102 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4103
4104 /* OMP directives are generally very expensive. */
4105
4106 case GIMPLE_OMP_RETURN:
4107 case GIMPLE_OMP_SECTIONS_SWITCH:
4108 case GIMPLE_OMP_ATOMIC_STORE:
4109 case GIMPLE_OMP_CONTINUE:
4110 /* ...except these, which are cheap. */
4111 return 0;
4112
4113 case GIMPLE_OMP_ATOMIC_LOAD:
4114 return weights->omp_cost;
4115
4116 case GIMPLE_OMP_FOR:
4117 return (weights->omp_cost
4118 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4119 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4120
4121 case GIMPLE_OMP_PARALLEL:
4122 case GIMPLE_OMP_TASK:
4123 case GIMPLE_OMP_CRITICAL:
4124 case GIMPLE_OMP_MASTER:
4125 case GIMPLE_OMP_TASKGROUP:
4126 case GIMPLE_OMP_ORDERED:
4127 case GIMPLE_OMP_SECTION:
4128 case GIMPLE_OMP_SECTIONS:
4129 case GIMPLE_OMP_SINGLE:
4130 case GIMPLE_OMP_TARGET:
4131 case GIMPLE_OMP_TEAMS:
4132 return (weights->omp_cost
4133 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4134
4135 case GIMPLE_TRANSACTION:
4136 return (weights->tm_cost
4137 + estimate_num_insns_seq (gimple_transaction_body (
4138 as_a <gtransaction *> (stmt)),
4139 weights));
4140
4141 default:
4142 gcc_unreachable ();
4143 }
4144
4145 return cost;
4146 }
4147
4148 /* Estimate number of instructions that will be created by expanding
4149 function FNDECL. WEIGHTS contains weights attributed to various
4150 constructs. */
4151
4152 int
4153 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4154 {
4155 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4156 gimple_stmt_iterator bsi;
4157 basic_block bb;
4158 int n = 0;
4159
4160 gcc_assert (my_function && my_function->cfg);
4161 FOR_EACH_BB_FN (bb, my_function)
4162 {
4163 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4164 n += estimate_num_insns (gsi_stmt (bsi), weights);
4165 }
4166
4167 return n;
4168 }
4169
4170
4171 /* Initializes weights used by estimate_num_insns. */
4172
4173 void
4174 init_inline_once (void)
4175 {
4176 eni_size_weights.call_cost = 1;
4177 eni_size_weights.indirect_call_cost = 3;
4178 eni_size_weights.target_builtin_call_cost = 1;
4179 eni_size_weights.div_mod_cost = 1;
4180 eni_size_weights.omp_cost = 40;
4181 eni_size_weights.tm_cost = 10;
4182 eni_size_weights.time_based = false;
4183 eni_size_weights.return_cost = 1;
4184
4185 /* Estimating time for call is difficult, since we have no idea what the
4186 called function does. In the current uses of eni_time_weights,
4187 underestimating the cost does less harm than overestimating it, so
4188 we choose a rather small value here. */
4189 eni_time_weights.call_cost = 10;
4190 eni_time_weights.indirect_call_cost = 15;
4191 eni_time_weights.target_builtin_call_cost = 1;
4192 eni_time_weights.div_mod_cost = 10;
4193 eni_time_weights.omp_cost = 40;
4194 eni_time_weights.tm_cost = 40;
4195 eni_time_weights.time_based = true;
4196 eni_time_weights.return_cost = 2;
4197 }
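/* Under these weights, a simple call "x = f (y)" with int-sized
   operands on a typical 64-bit target is sized at roughly
   1 (call_cost) + 1 (moving the result) + 1 (moving the argument)
   == 3 size units, while its time estimate starts from
   call_cost == 10.  */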
4198
4199
4200 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4201
4202 static void
4203 prepend_lexical_block (tree current_block, tree new_block)
4204 {
4205 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4206 BLOCK_SUBBLOCKS (current_block) = new_block;
4207 BLOCK_SUPERCONTEXT (new_block) = current_block;
4208 }
4209
4210 /* Add local variables from CALLEE to CALLER. */
4211
4212 static inline void
4213 add_local_variables (struct function *callee, struct function *caller,
4214 copy_body_data *id)
4215 {
4216 tree var;
4217 unsigned ix;
4218
4219 FOR_EACH_LOCAL_DECL (callee, ix, var)
4220 if (!can_be_nonlocal (var, id))
4221 {
4222 tree new_var = remap_decl (var, id);
4223
4224 /* Remap debug-expressions. */
4225 if (VAR_P (new_var)
4226 && DECL_HAS_DEBUG_EXPR_P (var)
4227 && new_var != var)
4228 {
4229 tree tem = DECL_DEBUG_EXPR (var);
4230 bool old_regimplify = id->regimplify;
4231 id->remapping_type_depth++;
4232 walk_tree (&tem, copy_tree_body_r, id, NULL);
4233 id->remapping_type_depth--;
4234 id->regimplify = old_regimplify;
4235 SET_DECL_DEBUG_EXPR (new_var, tem);
4236 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4237 }
4238 add_local_decl (caller, new_var);
4239 }
4240 }
4241
4242 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4243 have brought in or introduced any debug stmts for SRCVAR. */
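/* The reset is a debug bind without a value, e.g.
     # DEBUG var => NULL
   which tells var-tracking that the variable's location is unknown
   from this point on.  */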
4244
4245 static inline void
4246 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4247 {
4248 tree *remappedvarp = id->decl_map->get (srcvar);
4249
4250 if (!remappedvarp)
4251 return;
4252
4253 if (!VAR_P (*remappedvarp))
4254 return;
4255
4256 if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4257 return;
4258
4259 tree tvar = target_for_debug_bind (*remappedvarp);
4260 if (!tvar)
4261 return;
4262
4263 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4264 id->call_stmt);
4265 gimple_seq_add_stmt (bindings, stmt);
4266 }
4267
4268 /* For each inlined variable for which we may have debug bind stmts,
4269 add before GSI a final debug stmt resetting it, marking the end of
4270 its life, so that var-tracking knows it doesn't have to compute
4271 further locations for it. */
4272
4273 static inline void
4274 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4275 {
4276 tree var;
4277 unsigned ix;
4278 gimple_seq bindings = NULL;
4279
4280 if (!gimple_in_ssa_p (id->src_cfun))
4281 return;
4282
4283 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4284 return;
4285
4286 for (var = DECL_ARGUMENTS (id->src_fn);
4287 var; var = DECL_CHAIN (var))
4288 reset_debug_binding (id, var, &bindings);
4289
4290 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4291 reset_debug_binding (id, var, &bindings);
4292
4293 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4294 }
4295
4296 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
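/* A sketch of the transformation: for "x = f (y);" where F returns
   y + 1, the call statement is (roughly) replaced by
     y.0 = y;              <-- parameter initialization
     retval.1 = y.0 + 1;   <-- copied body; RETURN became an assignment
     x = retval.1;         <-- the call becomes a use of the retvar
   with the enclosing block split around the former call.  The
   temporary names here are only illustrative.  */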
4297
4298 static bool
4299 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4300 {
4301 tree use_retvar;
4302 tree fn;
4303 hash_map<tree, tree> *dst;
4304 hash_map<tree, tree> *st = NULL;
4305 tree return_slot;
4306 tree modify_dest;
4307 struct cgraph_edge *cg_edge;
4308 cgraph_inline_failed_t reason;
4309 basic_block return_block;
4310 edge e;
4311 gimple_stmt_iterator gsi, stmt_gsi;
4312 bool successfully_inlined = false;
4313 bool purge_dead_abnormal_edges;
4314 gcall *call_stmt;
4315 unsigned int prop_mask, src_properties;
4316 struct function *dst_cfun;
4317 tree simduid;
4318 use_operand_p use;
4319 gimple *simtenter_stmt = NULL;
4320 vec<tree> *simtvars_save;
4321
4322 /* The gimplifier uses input_location in too many places, such as
4323 internal_get_tmp_var (). */
4324 location_t saved_location = input_location;
4325 input_location = gimple_location (stmt);
4326
4327 /* From here on, we're only interested in CALL_EXPRs. */
4328 call_stmt = dyn_cast <gcall *> (stmt);
4329 if (!call_stmt)
4330 goto egress;
4331
4332 cg_edge = id->dst_node->get_edge (stmt);
4333 gcc_checking_assert (cg_edge);
4334 /* First, see if we can figure out what function is being called.
4335 If we cannot, then there is no hope of inlining the function. */
4336 if (cg_edge->indirect_unknown_callee)
4337 goto egress;
4338 fn = cg_edge->callee->decl;
4339 gcc_checking_assert (fn);
4340
4341 /* If FN is a declaration of a function in a nested scope that was
4342 globally declared inline, we don't set its DECL_INITIAL.
4343 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4344 C++ front-end uses it for cdtors to refer to their internal
4345 declarations, that are not real functions. Fortunately those
4346 don't have trees to be saved, so we can tell by checking their
4347 gimple_body. */
4348 if (!DECL_INITIAL (fn)
4349 && DECL_ABSTRACT_ORIGIN (fn)
4350 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4351 fn = DECL_ABSTRACT_ORIGIN (fn);
4352
4353 /* Don't try to inline functions that are not well-suited to inlining. */
4354 if (cg_edge->inline_failed)
4355 {
4356 reason = cg_edge->inline_failed;
4357 /* If this call was originally indirect, we do not want to emit any
4358 inlining-related warnings or sorry messages because there are no
4359 guarantees regarding those. */
4360 if (cg_edge->indirect_inlining_edge)
4361 goto egress;
4362
4363 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4364 /* For extern inline functions that get redefined we have
4365 always silently ignored the always_inline flag. Better
4366 behavior would be to keep both bodies and use the extern
4367 inline body for inlining, but we can't do that because
4368 frontends overwrite the body. */
4369 && !cg_edge->callee->local.redefined_extern_inline
4370 /* During early inline pass, report only when optimization is
4371 not turned on. */
4372 && (symtab->global_info_ready
4373 || !optimize
4374 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4375 /* PR 20090218-1_0.c. Body can be provided by another module. */
4376 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4377 {
4378 error ("inlining failed in call to always_inline %q+F: %s", fn,
4379 cgraph_inline_failed_string (reason));
4380 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4381 inform (gimple_location (stmt), "called from here");
4382 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4383 inform (DECL_SOURCE_LOCATION (cfun->decl),
4384 "called from this function");
4385 }
4386 else if (warn_inline
4387 && DECL_DECLARED_INLINE_P (fn)
4388 && !DECL_NO_INLINE_WARNING_P (fn)
4389 && !DECL_IN_SYSTEM_HEADER (fn)
4390 && reason != CIF_UNSPECIFIED
4391 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4392 /* Do not warn about not inlined recursive calls. */
4393 && !cg_edge->recursive_p ()
4394 /* Avoid warnings during early inline pass. */
4395 && symtab->global_info_ready)
4396 {
4397 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4398 fn, _(cgraph_inline_failed_string (reason))))
4399 {
4400 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4401 inform (gimple_location (stmt), "called from here");
4402 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4403 inform (DECL_SOURCE_LOCATION (cfun->decl),
4404 "called from this function");
4405 }
4406 }
4407 goto egress;
4408 }
4409 id->src_node = cg_edge->callee;
4410
4411 /* If the callee is a thunk, all we need to do is adjust the THIS
4412 pointer and redirect to the function being thunked. */
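/* Sketch: for a this-adjusting thunk with fixed_offset 8, a call
   "f_thunk (p, ...)" is rewritten in place to roughly
   "tmp = p; tmp = tmp + 8; f (tmp, ...)" and then inlined as a
   direct call to F.  */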
4413 if (id->src_node->thunk.thunk_p)
4414 {
4415 cgraph_edge *edge;
4416 tree virtual_offset = NULL;
4417 profile_count count = cg_edge->count;
4418 tree op;
4419 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4420
4421 cg_edge->remove ();
4422 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4423 gimple_uid (stmt),
4424 profile_count::one (),
4425 profile_count::one (),
4426 true);
4427 edge->count = count;
4428 if (id->src_node->thunk.virtual_offset_p)
4429 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4430 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4431 NULL);
4432 gsi_insert_before (&iter, gimple_build_assign (op,
4433 gimple_call_arg (stmt, 0)),
4434 GSI_NEW_STMT);
4435 gcc_assert (id->src_node->thunk.this_adjusting);
4436 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4437 virtual_offset);
4438
4439 gimple_call_set_arg (stmt, 0, op);
4440 gimple_call_set_fndecl (stmt, edge->callee->decl);
4441 update_stmt (stmt);
4442 id->src_node->remove ();
4443 expand_call_inline (bb, stmt, id);
4444 maybe_remove_unused_call_args (cfun, stmt);
4445 return true;
4446 }
4447 fn = cg_edge->callee->decl;
4448 cg_edge->callee->get_untransformed_body ();
4449
4450 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4451 cg_edge->callee->verify ();
4452
4453 /* We will be inlining this callee. */
4454 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4455 id->assign_stmts.create (0);
4456
4457 /* Update the caller's EH personality. */
4458 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4459 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4460 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4461
4462 /* Split the block before the GIMPLE_CALL. */
4463 stmt_gsi = gsi_for_stmt (stmt);
4464 gsi_prev (&stmt_gsi);
4465 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4466 bb = e->src;
4467 return_block = e->dest;
4468 remove_edge (e);
4469
4470 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4471 been the source of abnormal edges. In this case, schedule
4472 the removal of dead abnormal edges. */
4473 gsi = gsi_start_bb (return_block);
4474 gsi_next (&gsi);
4475 purge_dead_abnormal_edges = gsi_end_p (gsi);
4476
4477 stmt_gsi = gsi_start_bb (return_block);
4478
4479 /* Build a block containing code to initialize the arguments, the
4480 actual inline expansion of the body, and a label for the return
4481 statements within the function to jump to. The type of the
4482 statement expression is the return type of the function call.
4483 ??? If the call does not have an associated block then we will
4484 remap all callee blocks to NULL, effectively dropping most of
4485 its debug information. This should only happen for calls to
4486 artificial decls inserted by the compiler itself. We need to
4487 either link the inlined blocks into the caller block tree or
4488 not refer to them in any way to not break GC for locations. */
4489 if (gimple_block (stmt))
4490 {
4491 id->block = make_node (BLOCK);
4492 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4493 BLOCK_SOURCE_LOCATION (id->block)
4494 = LOCATION_LOCUS (gimple_location (stmt));
4495 prepend_lexical_block (gimple_block (stmt), id->block);
4496 }
4497
4498 /* Local declarations will be replaced by their equivalents in this
4499 map. */
4500 st = id->decl_map;
4501 id->decl_map = new hash_map<tree, tree>;
4502 dst = id->debug_map;
4503 id->debug_map = NULL;
4504
4505 /* Record the function we are about to inline. */
4506 id->src_fn = fn;
4507 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4508 id->call_stmt = call_stmt;
4509
4510 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4511 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4512 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4513 simtvars_save = id->dst_simt_vars;
4514 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4515 && (simduid = bb->loop_father->simduid) != NULL_TREE
4516 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4517 && single_imm_use (simduid, &use, &simtenter_stmt)
4518 && is_gimple_call (simtenter_stmt)
4519 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4520 vec_alloc (id->dst_simt_vars, 0);
4521 else
4522 id->dst_simt_vars = NULL;
4523
4524 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4525 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4526
4527 /* If the src function contains an IFN_VA_ARG, then so will the dst
4528 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4529 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4530 src_properties = id->src_cfun->curr_properties & prop_mask;
4531 if (src_properties != prop_mask)
4532 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4533
4534 gcc_assert (!id->src_cfun->after_inlining);
4535
4536 id->entry_bb = bb;
4537 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4538 {
4539 gimple_stmt_iterator si = gsi_last_bb (bb);
4540 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4541 NOT_TAKEN),
4542 GSI_NEW_STMT);
4543 }
4544 initialize_inlined_parameters (id, stmt, fn, bb);
4545 if (debug_nonbind_markers_p && debug_inline_points && id->block
4546 && inlined_function_outer_scope_p (id->block))
4547 {
4548 gimple_stmt_iterator si = gsi_last_bb (bb);
4549 gsi_insert_after (&si, gimple_build_debug_inline_entry
4550 (id->block, input_location), GSI_NEW_STMT);
4551 }
4552
4553 if (DECL_INITIAL (fn))
4554 {
4555 if (gimple_block (stmt))
4556 {
4557 tree *var;
4558
4559 prepend_lexical_block (id->block,
4560 remap_blocks (DECL_INITIAL (fn), id));
4561 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4562 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4563 == NULL_TREE));
4564 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4565 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4566 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4567 under it. The parameters can be then evaluated in the debugger,
4568 but don't show in backtraces. */
4569 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4570 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4571 {
4572 tree v = *var;
4573 *var = TREE_CHAIN (v);
4574 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4575 BLOCK_VARS (id->block) = v;
4576 }
4577 else
4578 var = &TREE_CHAIN (*var);
4579 }
4580 else
4581 remap_blocks_to_null (DECL_INITIAL (fn), id);
4582 }
4583
4584 /* Return statements in the function body will be replaced by jumps
4585 to the RET_LABEL. */
4586 gcc_assert (DECL_INITIAL (fn));
4587 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4588
4589 /* Find the LHS to which the result of this call is assigned. */
4590 return_slot = NULL;
4591 if (gimple_call_lhs (stmt))
4592 {
4593 modify_dest = gimple_call_lhs (stmt);
4594
4595 /* The function which we are inlining might not return a value,
4596 in which case we should issue a warning that the function
4597 does not return a value. In that case the optimizers will
4598 see that the variable to which the value is assigned was not
4599 initialized. We do not want to issue a warning about that
4600 uninitialized variable. */
4601 if (DECL_P (modify_dest))
4602 TREE_NO_WARNING (modify_dest) = 1;
4603
4604 if (gimple_call_return_slot_opt_p (call_stmt))
4605 {
4606 return_slot = modify_dest;
4607 modify_dest = NULL;
4608 }
4609 }
4610 else
4611 modify_dest = NULL;
4612
4613 /* If we are inlining a call to the C++ operator new, we don't want
4614 to use type-based alias analysis on the return value. Otherwise
4615 we may get confused if the compiler sees that the inlined new
4616 function returns a pointer which was just deleted. See bug
4617 33407. */
4618 if (DECL_IS_OPERATOR_NEW (fn))
4619 {
4620 return_slot = NULL;
4621 modify_dest = NULL;
4622 }
4623
4624 /* Declare the return variable for the function. */
4625 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4626
4627 /* Add local vars in this inlined callee to caller. */
4628 add_local_variables (id->src_cfun, cfun, id);
4629
4630 if (dump_file && (dump_flags & TDF_DETAILS))
4631 {
4632 fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
4633 id->src_node->dump_name (),
4634 id->dst_node->dump_name (),
4635 cg_edge->sreal_frequency ().to_double ());
4636 id->src_node->dump (dump_file);
4637 id->dst_node->dump (dump_file);
4638 }
4639
4640 /* This is it. Duplicate the callee body. Assume callee is
4641 pre-gimplified. Note that we must not alter the caller
4642 function in any way before this point, as this CALL_EXPR may be
4643 a self-referential call; if we're calling ourselves, we need to
4644 duplicate our body before altering anything. */
4645 copy_body (id, bb, return_block, NULL);
4646
4647 reset_debug_bindings (id, stmt_gsi);
4648
4649 if (flag_stack_reuse != SR_NONE)
4650 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4651 if (!TREE_THIS_VOLATILE (p))
4652 {
4653 tree *varp = id->decl_map->get (p);
4654 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4655 {
4656 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4657 gimple *clobber_stmt;
4658 TREE_THIS_VOLATILE (clobber) = 1;
4659 clobber_stmt = gimple_build_assign (*varp, clobber);
4660 gimple_set_location (clobber_stmt, gimple_location (stmt));
4661 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4662 }
4663 }
4664
4665 /* Reset the escaped solution. */
4666 if (cfun->gimple_df)
4667 pt_solution_reset (&cfun->gimple_df->escaped);
4668
4669 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4670 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4671 {
4672 size_t nargs = gimple_call_num_args (simtenter_stmt);
4673 vec<tree> *vars = id->dst_simt_vars;
4674 auto_vec<tree> newargs (nargs + vars->length ());
4675 for (size_t i = 0; i < nargs; i++)
4676 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4677 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4678 {
4679 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4680 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4681 }
4682 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4683 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4684 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4685 gsi_replace (&gsi, g, false);
4686 }
4687 vec_free (id->dst_simt_vars);
4688 id->dst_simt_vars = simtvars_save;
4689
4690 /* Clean up. */
4691 if (id->debug_map)
4692 {
4693 delete id->debug_map;
4694 id->debug_map = dst;
4695 }
4696 delete id->decl_map;
4697 id->decl_map = st;
4698
4699 /* Unlink the call's virtual operands before replacing it. */
4700 unlink_stmt_vdef (stmt);
4701 if (gimple_vdef (stmt)
4702 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4703 release_ssa_name (gimple_vdef (stmt));
4704
4705 /* If the inlined function returns a result that we care about,
4706 substitute the GIMPLE_CALL with an assignment of the return
4707 variable to the LHS of the call. That is, if STMT was
4708 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4709 if (use_retvar && gimple_call_lhs (stmt))
4710 {
4711 gimple *old_stmt = stmt;
4712 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4713 gsi_replace (&stmt_gsi, stmt, false);
4714 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4715 /* Append a clobber for id->retvar if easily possible. */
4716 if (flag_stack_reuse != SR_NONE
4717 && id->retvar
4718 && VAR_P (id->retvar)
4719 && id->retvar != return_slot
4720 && id->retvar != modify_dest
4721 && !TREE_THIS_VOLATILE (id->retvar)
4722 && !is_gimple_reg (id->retvar)
4723 && !stmt_ends_bb_p (stmt))
4724 {
4725 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4726 gimple *clobber_stmt;
4727 TREE_THIS_VOLATILE (clobber) = 1;
4728 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4729 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4730 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4731 }
4732 }
4733 else
4734 {
4735 /* Handle the case of inlining a function with no return
4736 statement, which causes the return value to become undefined. */
4737 if (gimple_call_lhs (stmt)
4738 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4739 {
4740 tree name = gimple_call_lhs (stmt);
4741 tree var = SSA_NAME_VAR (name);
4742 tree def = var ? ssa_default_def (cfun, var) : NULL;
4743
4744 if (def)
4745 {
4746 /* If the variable is used undefined, make this name
4747 undefined via a move. */
4748 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4749 gsi_replace (&stmt_gsi, stmt, true);
4750 }
4751 else
4752 {
4753 if (!var)
4754 {
4755 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4756 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4757 }
4758 /* Otherwise make this variable undefined. */
4759 gsi_remove (&stmt_gsi, true);
4760 set_ssa_default_def (cfun, var, name);
4761 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4762 }
4763 }
4764 /* Replace with a clobber for id->retvar. */
4765 else if (flag_stack_reuse != SR_NONE
4766 && id->retvar
4767 && VAR_P (id->retvar)
4768 && id->retvar != return_slot
4769 && id->retvar != modify_dest
4770 && !TREE_THIS_VOLATILE (id->retvar)
4771 && !is_gimple_reg (id->retvar))
4772 {
4773 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4774 gimple *clobber_stmt;
4775 TREE_THIS_VOLATILE (clobber) = 1;
4776 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4777 gimple_set_location (clobber_stmt, gimple_location (stmt));
4778 gsi_replace (&stmt_gsi, clobber_stmt, false);
4779 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4780 }
4781 else
4782 gsi_remove (&stmt_gsi, true);
4783 }
4784
4785 if (purge_dead_abnormal_edges)
4786 {
4787 gimple_purge_dead_eh_edges (return_block);
4788 gimple_purge_dead_abnormal_call_edges (return_block);
4789 }
4790
4791 /* If the value of the new expression is ignored, that's OK. We
4792 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4793 the equivalent inlined version either. */
4794 if (is_gimple_assign (stmt))
4795 {
4796 gcc_assert (gimple_assign_single_p (stmt)
4797 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4798 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4799 }
4800
4801 id->assign_stmts.release ();
4802
4803 /* Output the inlining info for this abstract function, since it has been
4804 inlined. If we don't do this now, we can lose the information about the
4805 variables in the function when the blocks get blown away as soon as we
4806 remove the cgraph node. */
4807 if (gimple_block (stmt))
4808 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4809
4810 /* Update callgraph if needed. */
4811 cg_edge->callee->remove ();
4812
4813 id->block = NULL_TREE;
4814 id->retvar = NULL_TREE;
4815 id->retbnd = NULL_TREE;
4816 successfully_inlined = true;
4817
4818 egress:
4819 input_location = saved_location;
4820 return successfully_inlined;
4821 }
4822
4823 /* Expand calls to inlinable functions in the statements of basic
4824 block BB. Returns true if any call was inlined. */
4826
4827 static bool
4828 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4829 {
4830 gimple_stmt_iterator gsi;
4831 bool inlined = false;
4832
4833 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4834 {
4835 gimple *stmt = gsi_stmt (gsi);
4836 gsi_prev (&gsi);
4837
4838 if (is_gimple_call (stmt)
4839 && !gimple_call_internal_p (stmt))
4840 inlined |= expand_call_inline (bb, stmt, id);
4841 }
4842
4843 return inlined;
4844 }
4845
4846
4847 /* Walk all basic blocks created after FIRST and try to fold every statement
4848 in the STATEMENTS pointer set. */
4849
4850 static void
4851 fold_marked_statements (int first, hash_set<gimple *> *statements)
4852 {
4853 for (; first < n_basic_blocks_for_fn (cfun); first++)
4854 if (BASIC_BLOCK_FOR_FN (cfun, first))
4855 {
4856 gimple_stmt_iterator gsi;
4857
4858 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4859 !gsi_end_p (gsi);
4860 gsi_next (&gsi))
4861 if (statements->contains (gsi_stmt (gsi)))
4862 {
4863 gimple *old_stmt = gsi_stmt (gsi);
4864 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4865
4866 if (old_decl && DECL_BUILT_IN (old_decl))
4867 {
4868 /* Folding builtins can create multiple instructions,
4869 we need to look at all of them. */
4870 gimple_stmt_iterator i2 = gsi;
4871 gsi_prev (&i2);
4872 if (fold_stmt (&gsi))
4873 {
4874 gimple *new_stmt;
4875 /* If a builtin at the end of a bb folded into nothing,
4876 the following loop won't work. */
4877 if (gsi_end_p (gsi))
4878 {
4879 cgraph_update_edges_for_call_stmt (old_stmt,
4880 old_decl, NULL);
4881 break;
4882 }
4883 if (gsi_end_p (i2))
4884 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4885 else
4886 gsi_next (&i2);
4887 while (1)
4888 {
4889 new_stmt = gsi_stmt (i2);
4890 update_stmt (new_stmt);
4891 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4892 new_stmt);
4893
4894 if (new_stmt == gsi_stmt (gsi))
4895 {
4896 /* It is okay to check only for the very last
4897 of these statements. If it is a throwing
4898 statement nothing will change. If it isn't,
4899 this can only remove EH edges. The only way
4900 that could be wrong is if some intermediate
4901 statements throw but the last one doesn't;
4902 that would mean we'd have to split the block,
4903 which we can't do here, and we'd lose anyway.
4904 And as builtins probably never throw, this
4905 is all moot anyway. */
4906 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4907 new_stmt))
4908 gimple_purge_dead_eh_edges (
4909 BASIC_BLOCK_FOR_FN (cfun, first));
4910 break;
4911 }
4912 gsi_next (&i2);
4913 }
4914 }
4915 }
4916 else if (fold_stmt (&gsi))
4917 {
4918 /* Re-read the statement from GSI as fold_stmt() may
4919 have changed it. */
4920 gimple *new_stmt = gsi_stmt (gsi);
4921 update_stmt (new_stmt);
4922
4923 if (is_gimple_call (old_stmt)
4924 || is_gimple_call (new_stmt))
4925 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4926 new_stmt);
4927
4928 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4929 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4930 first));
4931 }
4932 }
4933 }
4934 }
4935
4936 /* Expand calls to inline functions in the body of FN. */
4937
4938 unsigned int
4939 optimize_inline_calls (tree fn)
4940 {
4941 copy_body_data id;
4942 basic_block bb;
4943 int last = n_basic_blocks_for_fn (cfun);
4944 bool inlined_p = false;
4945
4946 /* Clear out ID. */
4947 memset (&id, 0, sizeof (id));
4948
4949 id.src_node = id.dst_node = cgraph_node::get (fn);
4950 gcc_assert (id.dst_node->definition);
4951 id.dst_fn = fn;
4952 /* Or any functions that aren't finished yet. */
4953 if (current_function_decl)
4954 id.dst_fn = current_function_decl;
4955
4956 id.copy_decl = copy_decl_maybe_to_var;
4957 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4958 id.transform_new_cfg = false;
4959 id.transform_return_to_modify = true;
4960 id.transform_parameter = true;
4961 id.transform_lang_insert_block = NULL;
4962 id.statements_to_fold = new hash_set<gimple *>;
4963
4964 push_gimplify_context ();
4965
4966 /* We make no attempts to keep dominance info up-to-date. */
4967 free_dominance_info (CDI_DOMINATORS);
4968 free_dominance_info (CDI_POST_DOMINATORS);
4969
4970 /* Register specific gimple functions. */
4971 gimple_register_cfg_hooks ();
4972
4973 /* Reach the trees by walking over the CFG, and note the
4974 enclosing basic-blocks in the call edges. */
4975 /* We walk the blocks going forward, because inlined function bodies
4976 will split id->current_basic_block, and the new blocks will
4977 follow it; we'll trudge through them, processing their CALL_EXPRs
4978 along the way. */
4979 FOR_EACH_BB_FN (bb, cfun)
4980 inlined_p |= gimple_expand_calls_inline (bb, &id);
4981
4982 pop_gimplify_context (NULL);
4983
4984 if (flag_checking)
4985 {
4986 struct cgraph_edge *e;
4987
4988 id.dst_node->verify ();
4989
4990 /* Double check that we inlined everything we are supposed to inline. */
4991 for (e = id.dst_node->callees; e; e = e->next_callee)
4992 gcc_assert (e->inline_failed);
4993 }
4994
4995 /* Fold queued statements. */
4996 update_max_bb_count ();
4997 fold_marked_statements (last, id.statements_to_fold);
4998 delete id.statements_to_fold;
4999
5000 gcc_assert (!id.debug_stmts.exists ());
5001
5002 /* If we didn't inline into the function there is nothing to do. */
5003 if (!inlined_p)
5004 return 0;
5005
5006 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5007 number_blocks (fn);
5008
5009 delete_unreachable_blocks_update_callgraph (&id);
5010 if (flag_checking)
5011 id.dst_node->verify ();
5012
5013 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5014 not possible yet - the IPA passes might make various functions not throw,
5015 and they don't care to proactively update local EH info. This is done
5016 later in the fixup_cfg pass, which also executes the verification. */
5017 return (TODO_update_ssa
5018 | TODO_cleanup_cfg
5019 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5020 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5021 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5022 ? TODO_rebuild_frequencies : 0));
5023 }
5024
5025 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5026
5027 tree
5028 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5029 {
5030 enum tree_code code = TREE_CODE (*tp);
5031 enum tree_code_class cl = TREE_CODE_CLASS (code);
5032
5033 /* We make copies of most nodes. */
5034 if (IS_EXPR_CODE_CLASS (cl)
5035 || code == TREE_LIST
5036 || code == TREE_VEC
5037 || code == TYPE_DECL
5038 || code == OMP_CLAUSE)
5039 {
5040 /* Because the chain gets clobbered when we make a copy, we save it
5041 here. */
5042 tree chain = NULL_TREE, new_tree;
5043
5044 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5045 chain = TREE_CHAIN (*tp);
5046
5047 /* Copy the node. */
5048 new_tree = copy_node (*tp);
5049
5050 *tp = new_tree;
5051
5052 /* Now, restore the chain, if appropriate. That will cause
5053 walk_tree to walk into the chain as well. */
5054 if (code == PARM_DECL
5055 || code == TREE_LIST
5056 || code == OMP_CLAUSE)
5057 TREE_CHAIN (*tp) = chain;
5058
5059 /* For now, we don't update BLOCKs when we make copies. So, we
5060 have to nullify all BIND_EXPRs. */
5061 if (TREE_CODE (*tp) == BIND_EXPR)
5062 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5063 }
5064 else if (code == CONSTRUCTOR)
5065 {
5066 /* CONSTRUCTOR nodes need special handling because
5067 we need to duplicate the vector of elements. */
5068 tree new_tree;
5069
5070 new_tree = copy_node (*tp);
5071 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5072 *tp = new_tree;
5073 }
5074 else if (code == STATEMENT_LIST)
5075 /* We used to just abort on STATEMENT_LIST, but we can run into them
5076 with statement-expressions (c++/40975). */
5077 copy_statement_list (tp);
5078 else if (TREE_CODE_CLASS (code) == tcc_type)
5079 *walk_subtrees = 0;
5080 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5081 *walk_subtrees = 0;
5082 else if (TREE_CODE_CLASS (code) == tcc_constant)
5083 *walk_subtrees = 0;
5084 return NULL_TREE;
5085 }
5086
5087 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5088 information indicating to what new SAVE_EXPR this one should be mapped,
5089 use that one. Otherwise, create a new node and enter it in ST. FN is
5090 the function into which the copy will be placed. */
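/* For example, if "SAVE_EXPR <n * 2>" occurs twice in the tree being
   copied, both occurrences must map to the same new SAVE_EXPR so that
   the saved value is still computed only once in the copy.  */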
5091
5092 static void
5093 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5094 {
5095 tree *n;
5096 tree t;
5097
5098 /* See if we already encountered this SAVE_EXPR. */
5099 n = st->get (*tp);
5100
5101 /* If we didn't already remap this SAVE_EXPR, do so now. */
5102 if (!n)
5103 {
5104 t = copy_node (*tp);
5105
5106 /* Remember this SAVE_EXPR. */
5107 st->put (*tp, t);
5108 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5109 st->put (t, t);
5110 }
5111 else
5112 {
5113 /* We've already walked into this SAVE_EXPR; don't do it again. */
5114 *walk_subtrees = 0;
5115 t = *n;
5116 }
5117
5118 /* Replace this SAVE_EXPR with the copy. */
5119 *tp = t;
5120 }
5121
5122 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5123 label, copies the declaration and enters it in the decl map in DATA (which
5124 is really a 'copy_body_data *'). */
5125
5126 static tree
5127 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5128 bool *handled_ops_p ATTRIBUTE_UNUSED,
5129 struct walk_stmt_info *wi)
5130 {
5131 copy_body_data *id = (copy_body_data *) wi->info;
5132 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5133
5134 if (stmt)
5135 {
5136 tree decl = gimple_label_label (stmt);
5137
5138 /* Copy the decl and remember the copy. */
5139 insert_decl_map (id, decl, id->copy_decl (decl, id));
5140 }
5141
5142 return NULL_TREE;
5143 }
5144
5145 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5146 struct walk_stmt_info *wi);
5147
5148 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5149 Using the decl map in the copy_body_data pointed to by WI->INFO,
5150 remaps all local declarations to appropriate replacements in gimple
5151 operands. */
5152
5153 static tree
5154 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5155 {
5156 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5157 copy_body_data *id = (copy_body_data *) wi->info;
5158 hash_map<tree, tree> *st = id->decl_map;
5159 tree *n;
5160 tree expr = *tp;
5161
5162 /* For recursive invocations this is no longer the LHS itself. */
5163 bool is_lhs = wi->is_lhs;
5164 wi->is_lhs = false;
5165
5166 if (TREE_CODE (expr) == SSA_NAME)
5167 {
5168 *tp = remap_ssa_name (*tp, id);
5169 *walk_subtrees = 0;
5170 if (is_lhs)
5171 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5172 }
5173 /* Only a local declaration (variable or label). */
5174 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5175 || TREE_CODE (expr) == LABEL_DECL)
5176 {
5177 /* Lookup the declaration. */
5178 n = st->get (expr);
5179
5180 /* If it's there, remap it. */
5181 if (n)
5182 *tp = *n;
5183 *walk_subtrees = 0;
5184 }
5185 else if (TREE_CODE (expr) == STATEMENT_LIST
5186 || TREE_CODE (expr) == BIND_EXPR
5187 || TREE_CODE (expr) == SAVE_EXPR)
5188 gcc_unreachable ();
5189 else if (TREE_CODE (expr) == TARGET_EXPR)
5190 {
5191 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5192 It's OK for this to happen if it was part of a subtree that
5193 isn't immediately expanded, such as operand 2 of another
5194 TARGET_EXPR. */
5195 if (!TREE_OPERAND (expr, 1))
5196 {
5197 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5198 TREE_OPERAND (expr, 3) = NULL_TREE;
5199 }
5200 }
5201 else if (TREE_CODE (expr) == OMP_CLAUSE)
5202 {
5203 /* Before the omplower pass completes, some OMP clauses can contain
5204 sequences that are neither copied by gimple_seq_copy nor walked by
5205 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5206 in those situations, we have to copy and process them explicitly. */
5207
5208 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5209 {
5210 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5211 seq = duplicate_remap_omp_clause_seq (seq, wi);
5212 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5213 }
5214 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5215 {
5216 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5217 seq = duplicate_remap_omp_clause_seq (seq, wi);
5218 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5219 }
5220 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5221 {
5222 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5223 seq = duplicate_remap_omp_clause_seq (seq, wi);
5224 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5225 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5226 seq = duplicate_remap_omp_clause_seq (seq, wi);
5227 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5228 }
5229 }
5230
5231 /* Keep iterating. */
5232 return NULL_TREE;
5233 }
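/* Illustrative sketch (not part of GCC itself): a clause such as the
   reduction in

     #pragma omp parallel for reduction(+:sum)
     for (i = 0; i < n; i++)
       sum += a[i];

   carries, until omplower completes, an init sequence (roughly
   "sum.priv = 0") in OMP_CLAUSE_REDUCTION_GIMPLE_INIT and a merge
   sequence (roughly "sum = sum + sum.priv") in
   OMP_CLAUSE_REDUCTION_GIMPLE_MERGE; these are exactly the sequences
   duplicated and remapped above. */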
5234
5235
5236 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5237 Using the decl map in the 'copy_body_data' carried in the walk_stmt_info,
5238 remaps all local declarations to appropriate replacements in gimple
5239 statements. */
5240
5241 static tree
5242 replace_locals_stmt (gimple_stmt_iterator *gsip,
5243 bool *handled_ops_p ATTRIBUTE_UNUSED,
5244 struct walk_stmt_info *wi)
5245 {
5246 copy_body_data *id = (copy_body_data *) wi->info;
5247 gimple *gs = gsi_stmt (*gsip);
5248
5249 if (gbind *stmt = dyn_cast <gbind *> (gs))
5250 {
5251 tree block = gimple_bind_block (stmt);
5252
5253 if (block)
5254 {
5255 remap_block (&block, id);
5256 gimple_bind_set_block (stmt, block);
5257 }
5258
5259 /* This will remap a lot of the same decls again, but this should be
5260 harmless. */
5261 if (gimple_bind_vars (stmt))
5262 {
5263 tree old_var, decls = gimple_bind_vars (stmt);
5264
5265 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5266 if (!can_be_nonlocal (old_var, id)
5267 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5268 remap_decl (old_var, id);
5269
5270 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5271 id->prevent_decl_creation_for_types = true;
5272 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5273 id->prevent_decl_creation_for_types = false;
5274 }
5275 }
5276
5277 /* Keep iterating. */
5278 return NULL_TREE;
5279 }
5280
5281 /* Create a copy of SEQ and remap all decls in it. */
5282
5283 static gimple_seq
5284 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5285 {
5286 if (!seq)
5287 return NULL;
5288
5289 /* Any labels in OMP sequences can only be referred to from within the
5290 sequence itself, so we can both mark them local and remap them here. */
5291 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5292 gimple_seq copy = gimple_seq_copy (seq);
5293 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5294 return copy;
5295 }
5296
5297 /* Copies everything in SEQ and replaces variables and labels local to
5298 current_function_decl. */
5299
5300 gimple_seq
5301 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5302 {
5303 copy_body_data id;
5304 struct walk_stmt_info wi;
5305 gimple_seq copy;
5306
5307 /* There's nothing to do for an empty sequence. */
5308 if (seq == NULL)
5309 return seq;
5310
5311 /* Set up ID. */
5312 memset (&id, 0, sizeof (id));
5313 id.src_fn = current_function_decl;
5314 id.dst_fn = current_function_decl;
5315 id.src_cfun = cfun;
5316 id.decl_map = new hash_map<tree, tree>;
5317 id.debug_map = NULL;
5318
5319 id.copy_decl = copy_decl_no_change;
5320 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5321 id.transform_new_cfg = false;
5322 id.transform_return_to_modify = false;
5323 id.transform_parameter = false;
5324 id.transform_lang_insert_block = NULL;
5325
5326 /* Walk the tree once to find local labels. */
5327 memset (&wi, 0, sizeof (wi));
5328 hash_set<tree> visited;
5329 wi.info = &id;
5330 wi.pset = &visited;
5331 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5332
5333 copy = gimple_seq_copy (seq);
5334
5335 /* Walk the copy, remapping decls. */
5336 memset (&wi, 0, sizeof (wi));
5337 wi.info = &id;
5338 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5339
5340 /* Clean up. */
5341 delete id.decl_map;
5342 if (id.debug_map)
5343 delete id.debug_map;
5344 if (id.dependence_map)
5345 {
5346 delete id.dependence_map;
5347 id.dependence_map = NULL;
5348 }
5349
5350 return copy;
5351 }
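/* Usage sketch (hypothetical caller, for exposition only):

     gimple_seq orig = ...;  // some sequence in current_function_decl
     gimple_seq copy = copy_gimple_seq_and_replace_locals (orig);

   COPY shares no local VAR_DECLs, LABEL_DECLs or SSA names with ORIG, so
   the two sequences can afterwards be modified independently. */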
5352
5353
5354 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5355
5356 static tree
5357 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5358 {
5359 if (*tp == data)
5360 return (tree) data;
5361 else
5362 return NULL;
5363 }
5364
5365 DEBUG_FUNCTION bool
5366 debug_find_tree (tree top, tree search)
5367 {
5368 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5369 }
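/* Example, from a gdb session (illustrative; TOP and SEARCH stand for any
   two tree values in scope):

     (gdb) call debug_find_tree (top, search)
     $1 = true

   where true means SEARCH occurs somewhere within TOP. */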
5370
5371
5372 /* Declare the variables created by the inliner. Add all the variables in
5373 VARS to BLOCK. */
5374
5375 static void
5376 declare_inline_vars (tree block, tree vars)
5377 {
5378 tree t;
5379 for (t = vars; t; t = DECL_CHAIN (t))
5380 {
5381 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5382 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5383 add_local_decl (cfun, t);
5384 }
5385
5386 if (block)
5387 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5388 }
5389
5390 /* Finish setting up the copy COPY of DECL. The DECL originally lived in
5391 ID->SRC_FN but the copy will live in ID->DST_FN. Copies the debug-info
5392 flags, clears stale RTL, and gives the copy its new context. */
5393
5394 tree
5395 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5396 {
5397 /* Don't generate debug information for the copy if we wouldn't have
5398 generated it for the original either. */
5399 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5400 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5401
5402 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5403 declaration inspired this copy. */
5404 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5405
5406 /* The new variable/label has no RTL, yet. */
5407 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5408 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5409 SET_DECL_RTL (copy, 0);
5410
5411 /* These args would always appear unused, if not for this. */
5412 TREE_USED (copy) = 1;
5413
5414 /* Set the context for the new declaration. */
5415 if (!DECL_CONTEXT (decl))
5416 /* Globals stay global. */
5417 ;
5418 else if (DECL_CONTEXT (decl) != id->src_fn)
5419 /* Things that weren't in the scope of the function we're inlining
5420 from aren't in the scope we're inlining to, either. */
5421 ;
5422 else if (TREE_STATIC (decl))
5423 /* Function-scoped static variables should stay in the original
5424 function. */
5425 ;
5426 else
5427 {
5428 /* Ordinary automatic local variables are now in the scope of the
5429 new function. */
5430 DECL_CONTEXT (copy) = id->dst_fn;
5431 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5432 {
5433 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5434 DECL_ATTRIBUTES (copy)
5435 = tree_cons (get_identifier ("omp simt private"), NULL,
5436 DECL_ATTRIBUTES (copy));
5437 id->dst_simt_vars->safe_push (copy);
5438 }
5439 }
5440
5441 return copy;
5442 }
5443
5444 static tree
5445 copy_decl_to_var (tree decl, copy_body_data *id)
5446 {
5447 tree copy, type;
5448
5449 gcc_assert (TREE_CODE (decl) == PARM_DECL
5450 || TREE_CODE (decl) == RESULT_DECL);
5451
5452 type = TREE_TYPE (decl);
5453
5454 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5455 VAR_DECL, DECL_NAME (decl), type);
5456 if (DECL_PT_UID_SET_P (decl))
5457 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5458 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5459 TREE_READONLY (copy) = TREE_READONLY (decl);
5460 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5461 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5462
5463 return copy_decl_for_dup_finish (id, decl, copy);
5464 }
5465
5466 /* Like copy_decl_to_var, but create a return slot object instead of a
5467 pointer variable for return by invisible reference. */
5468
5469 static tree
5470 copy_result_decl_to_var (tree decl, copy_body_data *id)
5471 {
5472 tree copy, type;
5473
5474 gcc_assert (TREE_CODE (decl) == PARM_DECL
5475 || TREE_CODE (decl) == RESULT_DECL);
5476
5477 type = TREE_TYPE (decl);
5478 if (DECL_BY_REFERENCE (decl))
5479 type = TREE_TYPE (type);
5480
5481 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5482 VAR_DECL, DECL_NAME (decl), type);
5483 if (DECL_PT_UID_SET_P (decl))
5484 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5485 TREE_READONLY (copy) = TREE_READONLY (decl);
5486 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5487 if (!DECL_BY_REFERENCE (decl))
5488 {
5489 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5490 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5491 }
5492
5493 return copy_decl_for_dup_finish (id, decl, copy);
5494 }
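/* For instance (sketch): for a function returning a large aggregate,

     struct big { char buf[1024]; };
     struct big f (void);

   the RESULT_DECL may be marked DECL_BY_REFERENCE with type
   'struct big *'; the variable built above then gets the pointed-to type
   'struct big', i.e. TREE_TYPE (type), and serves as the return slot
   itself rather than as a pointer to it. */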
5495
5496 tree
5497 copy_decl_no_change (tree decl, copy_body_data *id)
5498 {
5499 tree copy;
5500
5501 copy = copy_node (decl);
5502
5503 /* The COPY is not abstract; it will be generated in DST_FN. */
5504 DECL_ABSTRACT_P (copy) = false;
5505 lang_hooks.dup_lang_specific_decl (copy);
5506
5507 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5508 been taken; it's for internal bookkeeping in expand_goto_internal. */
5509 if (TREE_CODE (copy) == LABEL_DECL)
5510 {
5511 TREE_ADDRESSABLE (copy) = 0;
5512 LABEL_DECL_UID (copy) = -1;
5513 }
5514
5515 return copy_decl_for_dup_finish (id, decl, copy);
5516 }
5517
5518 static tree
5519 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5520 {
5521 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5522 return copy_decl_to_var (decl, id);
5523 else
5524 return copy_decl_no_change (decl, id);
5525 }
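/* Note: the ID->COPY_DECL hook is normally one of the routines above.
   Versioning and sequence copying install copy_decl_no_change (see
   copy_gimple_seq_and_replace_locals and tree_function_versioning),
   while inlining typically uses copy_decl_maybe_to_var so that the
   callee's PARM_DECLs and RESULT_DECL become plain VAR_DECLs in the
   caller. */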
5526
5527 /* Return a copy of the function's argument tree. */
5528 static tree
5529 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5530 bitmap args_to_skip, tree *vars)
5531 {
5532 tree arg, *parg;
5533 tree new_parm = NULL;
5534 int i = 0;
5535
5536 parg = &new_parm;
5537
5538 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5539 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5540 {
5541 tree new_tree = remap_decl (arg, id);
5542 if (TREE_CODE (new_tree) != PARM_DECL)
5543 new_tree = id->copy_decl (arg, id);
5544 lang_hooks.dup_lang_specific_decl (new_tree);
5545 *parg = new_tree;
5546 parg = &DECL_CHAIN (new_tree);
5547 }
5548 else if (!id->decl_map->get (arg))
5549 {
5550 /* Make an equivalent VAR_DECL. If the argument was used
5551 as a temporary variable later in the function, the uses will
5552 be replaced by the local variable. */
5553 tree var = copy_decl_to_var (arg, id);
5554 insert_decl_map (id, arg, var);
5555 /* Declare this new variable. */
5556 DECL_CHAIN (var) = *vars;
5557 *vars = var;
5558 }
5559 return new_parm;
5560 }
5561
5562 /* Return a copy of the function's static chain. */
5563 static tree
5564 copy_static_chain (tree static_chain, copy_body_data * id)
5565 {
5566 tree *chain_copy, *pvar;
5567
5568 chain_copy = &static_chain;
5569 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5570 {
5571 tree new_tree = remap_decl (*pvar, id);
5572 lang_hooks.dup_lang_specific_decl (new_tree);
5573 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5574 *pvar = new_tree;
5575 }
5576 return static_chain;
5577 }
5578
5579 /* Return true if the function is allowed to be versioned.
5580 This is a guard for the versioning functionality. */
5581
5582 bool
5583 tree_versionable_function_p (tree fndecl)
5584 {
5585 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5586 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5587 }
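/* For example, a function declared as

     __attribute__ ((noclone)) int f (int x);

   is never versioned, and neither is a function that copy_forbidden
   rejects, e.g. one whose body cannot be duplicated safely because it
   receives a non-local goto. */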
5588
5589 /* Delete all unreachable basic blocks and update the callgraph.
5590 Doing so is somewhat nontrivial because we need to update all clones and
5591 remove inline functions that become unreachable. */
5592
5593 static bool
5594 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5595 {
5596 bool changed = false;
5597 basic_block b, next_bb;
5598
5599 find_unreachable_blocks ();
5600
5601 /* Delete all unreachable basic blocks. */
5602
5603 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
5604 b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5605 {
5606 next_bb = b->next_bb;
5607
5608 if (!(b->flags & BB_REACHABLE))
5609 {
5610 gimple_stmt_iterator bsi;
5611
5612 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5613 {
5614 struct cgraph_edge *e;
5615 struct cgraph_node *node;
5616
5617 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5618
5619 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5620 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5621 {
5622 if (!e->inline_failed)
5623 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5624 else
5625 e->remove ();
5626 }
5627 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5628 && id->dst_node->clones)
5629 for (node = id->dst_node->clones; node != id->dst_node;)
5630 {
5631 node->remove_stmt_references (gsi_stmt (bsi));
5632 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5633 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5634 {
5635 if (!e->inline_failed)
5636 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5637 else
5638 e->remove ();
5639 }
5640
5641 if (node->clones)
5642 node = node->clones;
5643 else if (node->next_sibling_clone)
5644 node = node->next_sibling_clone;
5645 else
5646 {
5647 while (node != id->dst_node && !node->next_sibling_clone)
5648 node = node->clone_of;
5649 if (node != id->dst_node)
5650 node = node->next_sibling_clone;
5651 }
5652 }
5653 }
5654 delete_basic_block (b);
5655 changed = true;
5656 }
5657 }
5658
5659 return changed;
5660 }
5661
5662 /* Update clone info after duplication. */
5663
5664 static void
5665 update_clone_info (copy_body_data * id)
5666 {
5667 struct cgraph_node *node;
5668 if (!id->dst_node->clones)
5669 return;
5670 for (node = id->dst_node->clones; node != id->dst_node;)
5671 {
5672 /* First update replace maps to match the new body. */
5673 if (node->clone.tree_map)
5674 {
5675 unsigned int i;
5676 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5677 {
5678 struct ipa_replace_map *replace_info;
5679 replace_info = (*node->clone.tree_map)[i];
5680 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5681 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5682 }
5683 }
5684 if (node->clones)
5685 node = node->clones;
5686 else if (node->next_sibling_clone)
5687 node = node->next_sibling_clone;
5688 else
5689 {
5690 while (node != id->dst_node && !node->next_sibling_clone)
5691 node = node->clone_of;
5692 if (node != id->dst_node)
5693 node = node->next_sibling_clone;
5694 }
5695 }
5696 }
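/* Both delete_unreachable_blocks_update_callgraph and update_clone_info
   walk the clone tree in preorder: descend into NODE->clones first, then
   advance to NODE->next_sibling_clone, and otherwise climb back through
   NODE->clone_of until another sibling is found or the walk returns to
   ID->DST_NODE. */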
5697
5698 /* Create a copy of a function's tree.
5699 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5700 of the original function and the new copied function
5701 respectively. In case we want to replace a DECL
5702 tree with another tree while duplicating the function's
5703 body, TREE_MAP represents the mapping between these
5704 trees. If UPDATE_CLONES is set, the call_stmt fields
5705 of edges of clones of the function will be updated.
5706
5707 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5708 from the new version.
5709 If SKIP_RETURN is true, the new version will return void.
5710 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5711 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5712 */
5713 void
5714 tree_function_versioning (tree old_decl, tree new_decl,
5715 vec<ipa_replace_map *, va_gc> *tree_map,
5716 bool update_clones, bitmap args_to_skip,
5717 bool skip_return, bitmap blocks_to_copy,
5718 basic_block new_entry)
5719 {
5720 struct cgraph_node *old_version_node;
5721 struct cgraph_node *new_version_node;
5722 copy_body_data id;
5723 tree p;
5724 unsigned i;
5725 struct ipa_replace_map *replace_info;
5726 basic_block old_entry_block, bb;
5727 auto_vec<gimple *, 10> init_stmts;
5728 tree vars = NULL_TREE;
5729 bitmap debug_args_to_skip = args_to_skip;
5730
5731 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5732 && TREE_CODE (new_decl) == FUNCTION_DECL);
5733 DECL_POSSIBLY_INLINED (old_decl) = 1;
5734
5735 old_version_node = cgraph_node::get (old_decl);
5736 gcc_checking_assert (old_version_node);
5737 new_version_node = cgraph_node::get (new_decl);
5738 gcc_checking_assert (new_version_node);
5739
5740 /* Copy over debug args. */
5741 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5742 {
5743 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5744 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5745 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5746 old_debug_args = decl_debug_args_lookup (old_decl);
5747 if (old_debug_args)
5748 {
5749 new_debug_args = decl_debug_args_insert (new_decl);
5750 *new_debug_args = vec_safe_copy (*old_debug_args);
5751 }
5752 }
5753
5754 /* Output the inlining info for this abstract function, since it has been
5755 inlined. If we don't do this now, we can lose the information about the
5756 variables in the function when the blocks get blown away as soon as we
5757 remove the cgraph node. */
5758 (*debug_hooks->outlining_inline_function) (old_decl);
5759
5760 DECL_ARTIFICIAL (new_decl) = 1;
5761 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5762 if (DECL_ORIGIN (old_decl) == old_decl)
5763 old_version_node->used_as_abstract_origin = true;
5764 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5765
5766 /* Prepare the data structures for the tree copy. */
5767 memset (&id, 0, sizeof (id));
5768
5769 /* Record statements that need to be folded after copying the body. */
5770 id.statements_to_fold = new hash_set<gimple *>;
5771
5772 id.decl_map = new hash_map<tree, tree>;
5773 id.debug_map = NULL;
5774 id.src_fn = old_decl;
5775 id.dst_fn = new_decl;
5776 id.src_node = old_version_node;
5777 id.dst_node = new_version_node;
5778 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5779 id.blocks_to_copy = blocks_to_copy;
5780
5781 id.copy_decl = copy_decl_no_change;
5782 id.transform_call_graph_edges
5783 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5784 id.transform_new_cfg = true;
5785 id.transform_return_to_modify = false;
5786 id.transform_parameter = false;
5787 id.transform_lang_insert_block = NULL;
5788
5789 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5790 (DECL_STRUCT_FUNCTION (old_decl));
5791 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5792 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5793 initialize_cfun (new_decl, old_decl,
5794 new_entry ? new_entry->count : old_entry_block->count);
5795 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5796 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5797 = id.src_cfun->gimple_df->ipa_pta;
5798
5799 /* Copy the function's static chain. */
5800 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5801 if (p)
5802 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5803 = copy_static_chain (p, &id);
5804
5805 /* If there's a tree_map, prepare for substitution. */
5806 if (tree_map)
5807 for (i = 0; i < tree_map->length (); i++)
5808 {
5809 gimple *init;
5810 replace_info = (*tree_map)[i];
5811 if (replace_info->replace_p)
5812 {
5813 int parm_num = -1;
5814 if (!replace_info->old_tree)
5815 {
5816 int p = replace_info->parm_num;
5817 tree parm;
5818 tree req_type, new_type;
5819
5820 for (parm = DECL_ARGUMENTS (old_decl); p;
5821 parm = DECL_CHAIN (parm))
5822 p--;
5823 replace_info->old_tree = parm;
5824 parm_num = replace_info->parm_num;
5825 req_type = TREE_TYPE (parm);
5826 new_type = TREE_TYPE (replace_info->new_tree);
5827 if (!useless_type_conversion_p (req_type, new_type))
5828 {
5829 if (fold_convertible_p (req_type, replace_info->new_tree))
5830 replace_info->new_tree
5831 = fold_build1 (NOP_EXPR, req_type,
5832 replace_info->new_tree);
5833 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5834 replace_info->new_tree
5835 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5836 replace_info->new_tree);
5837 else
5838 {
5839 if (dump_file)
5840 {
5841 fprintf (dump_file, " const ");
5842 print_generic_expr (dump_file,
5843 replace_info->new_tree);
5844 fprintf (dump_file,
5845 " can't be converted to param ");
5846 print_generic_expr (dump_file, parm);
5847 fprintf (dump_file, "\n");
5848 }
5849 replace_info->old_tree = NULL;
5850 }
5851 }
5852 }
5853 else
5854 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5855 if (replace_info->old_tree)
5856 {
5857 init = setup_one_parameter (&id, replace_info->old_tree,
5858 replace_info->new_tree, id.src_fn,
5859 NULL,
5860 &vars);
5861 if (init)
5862 init_stmts.safe_push (init);
5863 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5864 {
5865 if (parm_num == -1)
5866 {
5867 tree parm;
5868 int p;
5869 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5870 parm = DECL_CHAIN (parm), p++)
5871 if (parm == replace_info->old_tree)
5872 {
5873 parm_num = p;
5874 break;
5875 }
5876 }
5877 if (parm_num != -1)
5878 {
5879 if (debug_args_to_skip == args_to_skip)
5880 {
5881 debug_args_to_skip = BITMAP_ALLOC (NULL);
5882 bitmap_copy (debug_args_to_skip, args_to_skip);
5883 }
5884 bitmap_clear_bit (debug_args_to_skip, parm_num);
5885 }
5886 }
5887 }
5888 }
5889 }
5890 /* Copy the function's arguments. */
5891 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5892 DECL_ARGUMENTS (new_decl)
5893 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5894 args_to_skip, &vars);
5895
5896 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5897 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5898
5899 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5900
5901 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5902 /* Add local vars. */
5903 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5904
5905 if (DECL_RESULT (old_decl) == NULL_TREE)
5906 ;
5907 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5908 {
5909 DECL_RESULT (new_decl)
5910 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5911 RESULT_DECL, NULL_TREE, void_type_node);
5912 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5913 cfun->returns_struct = 0;
5914 cfun->returns_pcc_struct = 0;
5915 }
5916 else
5917 {
5918 tree old_name;
5919 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5920 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5921 if (gimple_in_ssa_p (id.src_cfun)
5922 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5923 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5924 {
5925 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5926 insert_decl_map (&id, old_name, new_name);
5927 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5928 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5929 }
5930 }
5931
5932 /* Set up the destination function's loop tree. */
5933 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5934 {
5935 cfun->curr_properties &= ~PROP_loops;
5936 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5937 cfun->curr_properties |= PROP_loops;
5938 }
5939
5940 /* Copy the function's body. */
5941 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5942 new_entry);
5943
5944 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5945 number_blocks (new_decl);
5946
5947 /* We want to create the BB unconditionally, so that the addition of
5948 debug stmts doesn't affect BB count, which may in the end cause
5949 codegen differences. */
5950 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5951 while (init_stmts.length ())
5952 insert_init_stmt (&id, bb, init_stmts.pop ());
5953 update_clone_info (&id);
5954
5955 /* Remap the nonlocal_goto_save_area, if any. */
5956 if (cfun->nonlocal_goto_save_area)
5957 {
5958 struct walk_stmt_info wi;
5959
5960 memset (&wi, 0, sizeof (wi));
5961 wi.info = &id;
5962 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5963 }
5964
5965 /* Clean up. */
5966 delete id.decl_map;
5967 if (id.debug_map)
5968 delete id.debug_map;
5969 free_dominance_info (CDI_DOMINATORS);
5970 free_dominance_info (CDI_POST_DOMINATORS);
5971
5972 update_max_bb_count ();
5973 fold_marked_statements (0, id.statements_to_fold);
5974 delete id.statements_to_fold;
5975 delete_unreachable_blocks_update_callgraph (&id);
5976 if (id.dst_node->definition)
5977 cgraph_edge::rebuild_references ();
5978 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5979 {
5980 calculate_dominance_info (CDI_DOMINATORS);
5981 fix_loop_structure (NULL);
5982 }
5983 update_ssa (TODO_update_ssa);
5984
5985 /* After partial cloning we need to rescale frequencies, so they are
5986 within the proper range in the cloned function. */
5987 if (new_entry)
5988 {
5989 struct cgraph_edge *e;
5990 rebuild_frequencies ();
5991
5992 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5993 for (e = new_version_node->callees; e; e = e->next_callee)
5994 {
5995 basic_block bb = gimple_bb (e->call_stmt);
5996 e->count = bb->count;
5997 }
5998 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5999 {
6000 basic_block bb = gimple_bb (e->call_stmt);
6001 e->count = bb->count;
6002 }
6003 }
6004
6005 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6006 {
6007 tree parm;
6008 vec<tree, va_gc> **debug_args = NULL;
6009 unsigned int len = 0;
6010 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6011 parm; parm = DECL_CHAIN (parm), i++)
6012 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6013 {
6014 tree ddecl;
6015
6016 if (debug_args == NULL)
6017 {
6018 debug_args = decl_debug_args_insert (new_decl);
6019 len = vec_safe_length (*debug_args);
6020 }
6021 ddecl = make_node (DEBUG_EXPR_DECL);
6022 DECL_ARTIFICIAL (ddecl) = 1;
6023 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6024 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6025 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6026 vec_safe_push (*debug_args, ddecl);
6027 }
6028 if (debug_args != NULL)
6029 {
6030 /* On the callee side, add
6031 DEBUG D#Y s=> parm
6032 DEBUG var => D#Y
6033 stmts to the first bb where var is a VAR_DECL created for the
6034 optimized away parameter in DECL_INITIAL block. This hints
6035 in the debug info that var (whose DECL_ORIGIN is the parm
6036 PARM_DECL) is optimized away, but could be looked up at the
6037 call site as value of D#X there. */
6038 tree var = vars, vexpr;
6039 gimple_stmt_iterator cgsi
6040 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6041 gimple *def_temp;
6043 i = vec_safe_length (*debug_args);
6044 do
6045 {
6046 i -= 2;
6047 while (var != NULL_TREE
6048 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6049 var = TREE_CHAIN (var);
6050 if (var == NULL_TREE)
6051 break;
6052 vexpr = make_node (DEBUG_EXPR_DECL);
6053 parm = (**debug_args)[i];
6054 DECL_ARTIFICIAL (vexpr) = 1;
6055 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6056 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6057 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6058 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6059 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6060 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6061 }
6062 while (i > len);
6063 }
6064 }
6065
6066 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6067 BITMAP_FREE (debug_args_to_skip);
6068 free_dominance_info (CDI_DOMINATORS);
6069 free_dominance_info (CDI_POST_DOMINATORS);
6070
6071 gcc_assert (!id.debug_stmts.exists ());
6072 pop_cfun ();
6073 return;
6074 }
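/* Caller sketch (hypothetical, for exposition only; the real callers live
   in the IPA cloning machinery):

     tree new_decl = copy_node (old_decl);
     // ... give NEW_DECL its own name and cgraph node ...
     tree_function_versioning (old_decl, new_decl, tree_map,
                               false,         // update_clones
                               args_to_skip,
                               false,         // skip_return
                               NULL,          // blocks_to_copy
                               NULL);         // new_entry
*/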
6075
6076 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6077 the callee and return the inlined body on success. */
6078
6079 tree
6080 maybe_inline_call_in_expr (tree exp)
6081 {
6082 tree fn = get_callee_fndecl (exp);
6083
6084 /* We can only try to inline "const" functions. */
6085 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6086 {
6087 call_expr_arg_iterator iter;
6088 copy_body_data id;
6089 tree param, arg, t;
6090 hash_map<tree, tree> decl_map;
6091
6092 /* Remap the parameters. */
6093 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6094 param;
6095 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6096 decl_map.put (param, arg);
6097
6098 memset (&id, 0, sizeof (id));
6099 id.src_fn = fn;
6100 id.dst_fn = current_function_decl;
6101 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6102 id.decl_map = &decl_map;
6103
6104 id.copy_decl = copy_decl_no_change;
6105 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6106 id.transform_new_cfg = false;
6107 id.transform_return_to_modify = true;
6108 id.transform_parameter = true;
6109 id.transform_lang_insert_block = NULL;
6110
6111 /* Make sure not to unshare trees behind the front-end's back
6112 since front-end specific mechanisms may rely on sharing. */
6113 id.regimplify = false;
6114 id.do_not_unshare = true;
6115
6116 /* We're not inside any EH region. */
6117 id.eh_lp_nr = 0;
6118
6119 t = copy_tree_body (&id);
6120
6121 /* We can only return something suitable for use in a GENERIC
6122 expression tree. */
6123 if (TREE_CODE (t) == MODIFY_EXPR)
6124 return TREE_OPERAND (t, 1);
6125 }
6126
6127 return NULL_TREE;
6128 }
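/* For example (illustrative): given

     static int sq (int x) __attribute__ ((const));
     static int sq (int x) { return x * x; }

   handing the GENERIC CALL_EXPR 'sq (y)' to this function can yield the
   expression 'y * y', i.e. the right-hand side of the MODIFY_EXPR that
   stores the return value, provided DECL_SAVED_TREE for 'sq' is still
   available. */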
6129
6130 /* Duplicate a type, fields and all. */
6131
6132 tree
6133 build_duplicate_type (tree type)
6134 {
6135 struct copy_body_data id;
6136
6137 memset (&id, 0, sizeof (id));
6138 id.src_fn = current_function_decl;
6139 id.dst_fn = current_function_decl;
6140 id.src_cfun = cfun;
6141 id.decl_map = new hash_map<tree, tree>;
6142 id.debug_map = NULL;
6143 id.copy_decl = copy_decl_no_change;
6144
6145 type = remap_type_1 (type, &id);
6146
6147 delete id.decl_map;
6148 if (id.debug_map)
6149 delete id.debug_map;
6150
6151 TYPE_CANONICAL (type) = type;
6152
6153 return type;
6154 }
6155
6156 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6157 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6158 evaluation. */
6159
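/* Usage sketch (modeled on the C++ constexpr evaluator; names below are
   hypothetical):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);
     // Bind the remapped PARM_DECLs in PARMS to the argument values,
     // evaluate BODY, then read the value bound to RESULT.

   Each evaluation gets a fresh, unshared copy, so constant evaluation
   can never clobber the original DECL_SAVED_TREE. */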
6160 tree
6161 copy_fn (tree fn, tree& parms, tree& result)
6162 {
6163 copy_body_data id;
6164 tree param;
6165 hash_map<tree, tree> decl_map;
6166
6167 tree *p = &parms;
6168 *p = NULL_TREE;
6169
6170 memset (&id, 0, sizeof (id));
6171 id.src_fn = fn;
6172 id.dst_fn = current_function_decl;
6173 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6174 id.decl_map = &decl_map;
6175
6176 id.copy_decl = copy_decl_no_change;
6177 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6178 id.transform_new_cfg = false;
6179 id.transform_return_to_modify = false;
6180 id.transform_parameter = true;
6181 id.transform_lang_insert_block = NULL;
6182
6183 /* Make sure not to unshare trees behind the front-end's back
6184 since front-end specific mechanisms may rely on sharing. */
6185 id.regimplify = false;
6186 id.do_not_unshare = true;
6187
6188 /* We're not inside any EH region. */
6189 id.eh_lp_nr = 0;
6190
6191 /* Remap the parameters and result and return them to the caller. */
6192 for (param = DECL_ARGUMENTS (fn);
6193 param;
6194 param = DECL_CHAIN (param))
6195 {
6196 *p = remap_decl (param, &id);
6197 p = &DECL_CHAIN (*p);
6198 }
6199
6200 if (DECL_RESULT (fn))
6201 result = remap_decl (DECL_RESULT (fn), &id);
6202 else
6203 result = NULL_TREE;
6204
6205 return copy_tree_body (&id);
6206 }