1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64
65 /* I'm not really happy about this, but we need to handle gimple and
66 non-gimple trees. */
67
68 /* Inlining, Cloning, Versioning, Parallelization
69
70 Inlining: a function body is duplicated, but the PARM_DECLs are
71 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72 MODIFY_EXPRs that store to a dedicated returned-value variable.
73 The duplicated eh_region info of the copy will later be appended
74 to the info for the caller; the eh_region info in copied throwing
75 statements and RESX statements are adjusted accordingly.
76
77 Cloning: (only in C++) We have one body for a con/de/structor, and
78 multiple function decls, each with a unique parameter list.
79 Duplicate the body, using the given splay tree; some parameters
80 will become constants (like 0 or 1).
81
82 Versioning: a function body is duplicated, and the result is a new
83 function rather than being inlined into blocks of an existing
84 function as with inlining. Some parameters will become constants.
85
86 Parallelization: a region of a function is duplicated resulting in
87 a new function. Variables may be replaced with complex expressions
88 to enable shared variable semantics.
89
90 All of these will simultaneously look up any callgraph edges. If
91 we're going to inline the duplicated function body, and the given
92 function has some cloned callgraph nodes (one for each place this
93 function will be inlined), those callgraph edges will be duplicated.
94 If we're cloning the body, those callgraph edges will be
95 updated to point into the new body. (Note that the original
96 callgraph node and edge list will not be altered.)
97
98 See the CALL_EXPR handling case in copy_tree_body_r (). */
99
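/* Illustrative sketch (not part of the implementation; the names x.0,
   retval.1 and the statements shown are made up for exposition): for

     int callee (int x) { return x + 1; }
     ... y = callee (2); ...

   inlining duplicates the body of callee into the caller, remapping
   the PARM_DECL x into a local VAR_DECL and turning the non-void
   RETURN_EXPR into a MODIFY_EXPR of a returned-value variable:

     x.0 = 2;
     retval.1 = x.0 + 1;
     y = retval.1;  */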
100 /* To Do:
101
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
108
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
111
112
113 /* Weights that estimate_num_insns uses to estimate the size of the
114 produced code. */
115
116 eni_weights eni_size_weights;
117
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119 to execute the produced code. */
120
121 eni_weights eni_time_weights;
122
123 /* Prototypes. */
124
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_decl_to_var (tree, copy_body_data *);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, this is used for more than that. */
140
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144 id->decl_map->put (key, value);
145
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
148 if (key != value)
149 id->decl_map->put (value, value);
150 }
151
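/* Illustrative sketch (not part of the implementation; old_parm and
   new_var are hypothetical names): after

     insert_decl_map (id, old_parm, new_var);

   the map contains both old_parm -> new_var and the identity entry
   new_var -> new_var, so if the new node is encountered again during
   the copy it maps to itself instead of being duplicated a second
   time.  */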
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
154
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
160
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
163
164 if (!target_for_debug_bind (key))
165 return;
166
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
169
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
172
173 id->debug_map->put (key, value);
174 }
175
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
181
182 /* Construct new SSA name for old NAME. ID is the inline context. */
183
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187 tree new_tree, var;
188 tree *n;
189
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
191
192 n = id->decl_map->get (name);
193 if (n)
194 return unshare_expr (*n);
195
196 if (processing_debug_stmt)
197 {
198 if (SSA_NAME_IS_DEFAULT_DEF (name)
199 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 && id->entry_bb == NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 {
203 tree vexpr = make_node (DEBUG_EXPR_DECL);
204 gimple *def_temp;
205 gimple_stmt_iterator gsi;
206 tree val = SSA_NAME_VAR (name);
207
208 n = id->decl_map->get (val);
209 if (n != NULL)
210 val = *n;
211 if (TREE_CODE (val) != PARM_DECL
212 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
213 {
214 processing_debug_stmt = -1;
215 return name;
216 }
217 n = id->decl_map->get (val);
218 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
219 return *n;
220 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
221 DECL_ARTIFICIAL (vexpr) = 1;
222 TREE_TYPE (vexpr) = TREE_TYPE (name);
223 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
224 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
225 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
226 insert_decl_map (id, val, vexpr);
227 return vexpr;
228 }
229
230 processing_debug_stmt = -1;
231 return name;
232 }
233
234 /* Remap anonymous SSA names or SSA names of anonymous decls. */
235 var = SSA_NAME_VAR (name);
236 if (!var
237 || (!SSA_NAME_IS_DEFAULT_DEF (name)
238 && VAR_P (var)
239 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
240 && DECL_ARTIFICIAL (var)
241 && DECL_IGNORED_P (var)
242 && !DECL_NAME (var)))
243 {
244 struct ptr_info_def *pi;
245 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
246 if (!var && SSA_NAME_IDENTIFIER (name))
247 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
248 insert_decl_map (id, name, new_tree);
249 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
250 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
251 /* At least IPA points-to info can be directly transferred. */
252 if (id->src_cfun->gimple_df
253 && id->src_cfun->gimple_df->ipa_pta
254 && POINTER_TYPE_P (TREE_TYPE (name))
255 && (pi = SSA_NAME_PTR_INFO (name))
256 && !pi->pt.anything)
257 {
258 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
259 new_pi->pt = pi->pt;
260 }
261 return new_tree;
262 }
263
264 /* Do not set DEF_STMT yet as the statement is not copied yet. We do
265 that in copy_bb. */
266 new_tree = remap_decl (var, id);
267
268 /* We might've substituted a constant or another SSA_NAME for
269 the variable.
270
271 Replace the SSA name representing RESULT_DECL by the variable during
272 inlining: this saves us from needing to introduce a PHI node in case
273 the return value is only partly initialized. */
274 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
275 && (!SSA_NAME_VAR (name)
276 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
277 || !id->transform_return_to_modify))
278 {
279 struct ptr_info_def *pi;
280 new_tree = make_ssa_name (new_tree);
281 insert_decl_map (id, name, new_tree);
282 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
283 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
284 /* At least IPA points-to info can be directly transferred. */
285 if (id->src_cfun->gimple_df
286 && id->src_cfun->gimple_df->ipa_pta
287 && POINTER_TYPE_P (TREE_TYPE (name))
288 && (pi = SSA_NAME_PTR_INFO (name))
289 && !pi->pt.anything)
290 {
291 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
292 new_pi->pt = pi->pt;
293 }
294 if (SSA_NAME_IS_DEFAULT_DEF (name))
295 {
296 /* By inlining a function with an uninitialized variable, we might
297 extend its lifetime (the variable might get reused). This causes
298 an ICE if we end up extending the lifetime of an SSA name across an
299 abnormal edge, and it also increases register pressure.
300
301 We simply initialize all uninitialized vars with 0, except
302 when we are inlining into the very first BB. We could avoid
303 this for all BBs that are not inside strongly connected
304 regions of the CFG, but this is expensive to test. */
305 if (id->entry_bb
306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
307 && (!SSA_NAME_VAR (name)
308 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
309 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
310 0)->dest
311 || EDGE_COUNT (id->entry_bb->preds) != 1))
312 {
313 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
314 gimple *init_stmt;
315 tree zero = build_zero_cst (TREE_TYPE (new_tree));
316
317 init_stmt = gimple_build_assign (new_tree, zero);
318 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
319 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
320 }
321 else
322 {
323 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
324 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
325 }
326 }
327 }
328 else
329 insert_decl_map (id, name, new_tree);
330 return new_tree;
331 }
332
333 /* Remap DECL during the copying of the BLOCK tree for the function. */
334
335 tree
336 remap_decl (tree decl, copy_body_data *id)
337 {
338 tree *n;
339
340 /* We only remap local variables in the current function. */
341
342 /* See if we have remapped this declaration. */
343
344 n = id->decl_map->get (decl);
345
346 if (!n && processing_debug_stmt)
347 {
348 processing_debug_stmt = -1;
349 return decl;
350 }
351
352 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
353 necessary DECLs have already been remapped and we do not want to duplicate
354 a decl coming from outside of the sequence we are copying. */
355 if (!n
356 && id->prevent_decl_creation_for_types
357 && id->remapping_type_depth > 0
358 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
359 return decl;
360
361 /* If we didn't already have an equivalent for this declaration, create one
362 now. */
363 if (!n)
364 {
365 /* Make a copy of the variable or label. */
366 tree t = id->copy_decl (decl, id);
367
368 /* Remember it, so that if we encounter this local entity again
369 we can reuse this copy. Do this early because remap_type may
370 need this decl for TYPE_STUB_DECL. */
371 insert_decl_map (id, decl, t);
372
373 if (!DECL_P (t))
374 return t;
375
376 /* Remap types, if necessary. */
377 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
378 if (TREE_CODE (t) == TYPE_DECL)
379 {
380 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
381
382 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
383 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
384 is not set on the TYPE_DECL, for example in LTO mode. */
385 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
386 {
387 tree x = build_variant_type_copy (TREE_TYPE (t));
388 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
389 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
390 DECL_ORIGINAL_TYPE (t) = x;
391 }
392 }
393
394 /* Remap sizes as necessary. */
395 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
396 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
397
398 /* If fields, do likewise for offset and qualifier. */
399 if (TREE_CODE (t) == FIELD_DECL)
400 {
401 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
402 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
403 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
404 }
405
406 return t;
407 }
408
409 if (id->do_not_unshare)
410 return *n;
411 else
412 return unshare_expr (*n);
413 }
414
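/* Helper for remap_type.  TYPE is known to need remapping (it is
   variably modified); build the remapped copy, register it in ID's
   decl map, and remap its components (pointed-to type, array domain,
   fields and sizes).  */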
415 static tree
416 remap_type_1 (tree type, copy_body_data *id)
417 {
418 tree new_tree, t;
419
420 /* We do need a copy. Build and register it now. If this is a pointer or
421 reference type, remap the designated type and make a new pointer or
422 reference type. */
423 if (TREE_CODE (type) == POINTER_TYPE)
424 {
425 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
426 TYPE_MODE (type),
427 TYPE_REF_CAN_ALIAS_ALL (type));
428 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
429 new_tree = build_type_attribute_qual_variant (new_tree,
430 TYPE_ATTRIBUTES (type),
431 TYPE_QUALS (type));
432 insert_decl_map (id, type, new_tree);
433 return new_tree;
434 }
435 else if (TREE_CODE (type) == REFERENCE_TYPE)
436 {
437 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
438 TYPE_MODE (type),
439 TYPE_REF_CAN_ALIAS_ALL (type));
440 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
441 new_tree = build_type_attribute_qual_variant (new_tree,
442 TYPE_ATTRIBUTES (type),
443 TYPE_QUALS (type));
444 insert_decl_map (id, type, new_tree);
445 return new_tree;
446 }
447 else
448 new_tree = copy_node (type);
449
450 insert_decl_map (id, type, new_tree);
451
452 /* This is a new type, not a copy of an old type. Need to reassociate
453 variants. We can handle everything except the main variant lazily. */
454 t = TYPE_MAIN_VARIANT (type);
455 if (type != t)
456 {
457 t = remap_type (t, id);
458 TYPE_MAIN_VARIANT (new_tree) = t;
459 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
460 TYPE_NEXT_VARIANT (t) = new_tree;
461 }
462 else
463 {
464 TYPE_MAIN_VARIANT (new_tree) = new_tree;
465 TYPE_NEXT_VARIANT (new_tree) = NULL;
466 }
467
468 if (TYPE_STUB_DECL (type))
469 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
470
471 /* Lazily create pointer and reference types. */
472 TYPE_POINTER_TO (new_tree) = NULL;
473 TYPE_REFERENCE_TO (new_tree) = NULL;
474
475 /* Copy all types that may contain references to local variables; be sure to
476 preserve sharing between the type and its main variant when possible. */
477 switch (TREE_CODE (new_tree))
478 {
479 case INTEGER_TYPE:
480 case REAL_TYPE:
481 case FIXED_POINT_TYPE:
482 case ENUMERAL_TYPE:
483 case BOOLEAN_TYPE:
484 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
485 {
486 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
487 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
488
489 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
490 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 }
492 else
493 {
494 t = TYPE_MIN_VALUE (new_tree);
495 if (t && TREE_CODE (t) != INTEGER_CST)
496 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
497
498 t = TYPE_MAX_VALUE (new_tree);
499 if (t && TREE_CODE (t) != INTEGER_CST)
500 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
501 }
502 return new_tree;
503
504 case FUNCTION_TYPE:
505 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
506 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
507 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
508 else
509 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
510 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
511 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
512 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
513 else
514 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
515 return new_tree;
516
517 case ARRAY_TYPE:
518 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
519 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
520 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
521 else
522 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
523
524 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
525 {
526 gcc_checking_assert (TYPE_DOMAIN (type)
527 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
528 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
529 }
530 else
531 {
532 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
533 /* For array bounds where we have decided not to copy over the bounds
534 variable that isn't used in the OpenMP/OpenACC region, change them to
535 an uninitialized VAR_DECL temporary. */
536 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
537 && id->adjust_array_error_bounds
538 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
539 {
540 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
541 DECL_ATTRIBUTES (v)
542 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
543 DECL_ATTRIBUTES (v));
544 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
545 }
546 }
547 break;
548
549 case RECORD_TYPE:
550 case UNION_TYPE:
551 case QUAL_UNION_TYPE:
552 if (TYPE_MAIN_VARIANT (type) != type
553 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
554 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
555 else
556 {
557 tree f, nf = NULL;
558
559 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
560 {
561 t = remap_decl (f, id);
562 DECL_CONTEXT (t) = new_tree;
563 DECL_CHAIN (t) = nf;
564 nf = t;
565 }
566 TYPE_FIELDS (new_tree) = nreverse (nf);
567 }
568 break;
569
570 case OFFSET_TYPE:
571 default:
572 /* Shouldn't have been considered variable-sized. */
573 gcc_unreachable ();
574 }
575
576 /* All variants of the type share the same size, so use the already remapped data. */
577 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
578 {
579 tree s = TYPE_SIZE (type);
580 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
581 tree su = TYPE_SIZE_UNIT (type);
582 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
583 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
584 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
585 || s == mvs);
586 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
587 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
588 || su == mvsu);
589 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
590 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
591 }
592 else
593 {
594 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
595 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
596 }
597
598 return new_tree;
599 }
600
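/* Remap TYPE using the mapping information in ID.  Types that are not
   variably modified map to themselves; variably modified types are
   copied via remap_type_1.  */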
601 tree
602 remap_type (tree type, copy_body_data *id)
603 {
604 tree *node;
605 tree tmp;
606
607 if (type == NULL)
608 return type;
609
610 /* See if we have remapped this type. */
611 node = id->decl_map->get (type);
612 if (node)
613 return *node;
614
615 /* The type only needs remapping if it's variably modified. */
616 if (! variably_modified_type_p (type, id->src_fn))
617 {
618 insert_decl_map (id, type, type);
619 return type;
620 }
621
622 id->remapping_type_depth++;
623 tmp = remap_type_1 (type, id);
624 id->remapping_type_depth--;
625
626 return tmp;
627 }
628
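/* Illustrative sketch (hypothetical example, not part of the
   implementation): a plain type such as "int" is not variably
   modified, so remap_type just records an identity mapping for it.
   For

     void f (int n) { int a[n]; ... }

   the type of "a" refers to the PARM_DECL n through its domain, so
   copying the body must build a new array type whose bound refers to
   the remapped copy of n; that is the work done by remap_type_1.  */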
629 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
630
631 static bool
632 can_be_nonlocal (tree decl, copy_body_data *id)
633 {
634 /* We cannot duplicate function decls. */
635 if (TREE_CODE (decl) == FUNCTION_DECL)
636 return true;
637
638 /* Local static vars must be non-local or we get multiple declaration
639 problems. */
640 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
641 return true;
642
643 return false;
644 }
645
646 static tree
647 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
648 copy_body_data *id)
649 {
650 tree old_var;
651 tree new_decls = NULL_TREE;
652
653 /* Remap its variables. */
654 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
655 {
656 tree new_var;
657
658 if (can_be_nonlocal (old_var, id))
659 {
660 /* We need to add this variable to the local decls as otherwise
661 nothing else will do so. */
662 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
663 add_local_decl (cfun, old_var);
664 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
665 && !DECL_IGNORED_P (old_var)
666 && nonlocalized_list)
667 vec_safe_push (*nonlocalized_list, old_var);
668 continue;
669 }
670
671 /* Remap the variable. */
672 new_var = remap_decl (old_var, id);
673
674 /* If we didn't remap this variable, we can't mess with its
675 TREE_CHAIN. If we remapped this variable to the return slot, it's
676 already declared somewhere else, so don't declare it here. */
677
678 if (new_var == id->retvar)
679 ;
680 else if (!new_var)
681 {
682 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
683 && !DECL_IGNORED_P (old_var)
684 && nonlocalized_list)
685 vec_safe_push (*nonlocalized_list, old_var);
686 }
687 else
688 {
689 gcc_assert (DECL_P (new_var));
690 DECL_CHAIN (new_var) = new_decls;
691 new_decls = new_var;
692
693 /* Also copy value-expressions. */
694 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
695 {
696 tree tem = DECL_VALUE_EXPR (new_var);
697 bool old_regimplify = id->regimplify;
698 id->remapping_type_depth++;
699 walk_tree (&tem, copy_tree_body_r, id, NULL);
700 id->remapping_type_depth--;
701 id->regimplify = old_regimplify;
702 SET_DECL_VALUE_EXPR (new_var, tem);
703 }
704 }
705 }
706
707 return nreverse (new_decls);
708 }
709
710 /* Copy the BLOCK to contain remapped versions of the variables
711 therein. And hook the new block into the block-tree. */
712
713 static void
714 remap_block (tree *block, copy_body_data *id)
715 {
716 tree old_block;
717 tree new_block;
718
719 /* Make the new block. */
720 old_block = *block;
721 new_block = make_node (BLOCK);
722 TREE_USED (new_block) = TREE_USED (old_block);
723 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
724 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
725 BLOCK_NONLOCALIZED_VARS (new_block)
726 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
727 *block = new_block;
728
729 /* Remap its variables. */
730 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
731 &BLOCK_NONLOCALIZED_VARS (new_block),
732 id);
733
734 if (id->transform_lang_insert_block)
735 id->transform_lang_insert_block (new_block);
736
737 /* Remember the remapped block. */
738 insert_decl_map (id, old_block, new_block);
739 }
740
741 /* Copy the whole block tree and root it in id->block. */
742
743 static tree
744 remap_blocks (tree block, copy_body_data *id)
745 {
746 tree t;
747 tree new_tree = block;
748
749 if (!block)
750 return NULL;
751
752 remap_block (&new_tree, id);
753 gcc_assert (new_tree != block);
754 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
755 prepend_lexical_block (new_tree, remap_blocks (t, id));
756 /* Blocks are in arbitrary order, but make things slightly prettier and do
757 not swap order when producing a copy. */
758 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
759 return new_tree;
760 }
761
762 /* Remap the block tree rooted at BLOCK to nothing. */
763
764 static void
765 remap_blocks_to_null (tree block, copy_body_data *id)
766 {
767 tree t;
768 insert_decl_map (id, block, NULL_TREE);
769 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
770 remap_blocks_to_null (t, id);
771 }
772
773 /* Remap the location info pointed to by LOCUS. */
774
775 static location_t
776 remap_location (location_t locus, copy_body_data *id)
777 {
778 if (LOCATION_BLOCK (locus))
779 {
780 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
781 gcc_assert (n);
782 if (*n)
783 return set_block (locus, *n);
784 }
785
786 locus = LOCATION_LOCUS (locus);
787
788 if (locus != UNKNOWN_LOCATION && id->block)
789 return set_block (locus, id->block);
790
791 return locus;
792 }
793
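/* Replace the STATEMENT_LIST pointed to by TP with a copy of it,
   copying nested STATEMENT_LISTs as well so the original list is
   never spliced into the new one.  */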
794 static void
795 copy_statement_list (tree *tp)
796 {
797 tree_stmt_iterator oi, ni;
798 tree new_tree;
799
800 new_tree = alloc_stmt_list ();
801 ni = tsi_start (new_tree);
802 oi = tsi_start (*tp);
803 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
804 *tp = new_tree;
805
806 for (; !tsi_end_p (oi); tsi_next (&oi))
807 {
808 tree stmt = tsi_stmt (oi);
809 if (TREE_CODE (stmt) == STATEMENT_LIST)
810 /* This copy is not redundant; tsi_link_after will smash this
811 STATEMENT_LIST into the end of the one we're building, and we
812 don't want to do that with the original. */
813 copy_statement_list (&stmt);
814 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
815 }
816 }
817
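/* Copy the BIND_EXPR pointed to by TP, remapping its block and its
   variables using the mapping information in ID.  */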
818 static void
819 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
820 {
821 tree block = BIND_EXPR_BLOCK (*tp);
822 /* Copy (and replace) the statement. */
823 copy_tree_r (tp, walk_subtrees, NULL);
824 if (block)
825 {
826 remap_block (&block, id);
827 BIND_EXPR_BLOCK (*tp) = block;
828 }
829
830 if (BIND_EXPR_VARS (*tp))
831 /* This will remap a lot of the same decls again, but this should be
832 harmless. */
833 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
834 }
835
836
837 /* Create a new gimple_seq by remapping all the statements in BODY
838 using the inlining information in ID. */
839
840 static gimple_seq
841 remap_gimple_seq (gimple_seq body, copy_body_data *id)
842 {
843 gimple_stmt_iterator si;
844 gimple_seq new_body = NULL;
845
846 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
847 {
848 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
849 gimple_seq_add_seq (&new_body, new_stmts);
850 }
851
852 return new_body;
853 }
854
855
856 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
857 block using the mapping information in ID. */
858
859 static gimple *
860 copy_gimple_bind (gbind *stmt, copy_body_data *id)
861 {
862 gimple *new_bind;
863 tree new_block, new_vars;
864 gimple_seq body, new_body;
865
866 /* Copy the statement. Note that we purposely don't use copy_stmt
867 here because we need to remap statements as we copy. */
868 body = gimple_bind_body (stmt);
869 new_body = remap_gimple_seq (body, id);
870
871 new_block = gimple_bind_block (stmt);
872 if (new_block)
873 remap_block (&new_block, id);
874
875 /* This will remap a lot of the same decls again, but this should be
876 harmless. */
877 new_vars = gimple_bind_vars (stmt);
878 if (new_vars)
879 new_vars = remap_decls (new_vars, NULL, id);
880
881 new_bind = gimple_build_bind (new_vars, new_body, new_block);
882
883 return new_bind;
884 }
885
886 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
887
888 static bool
889 is_parm (tree decl)
890 {
891 if (TREE_CODE (decl) == SSA_NAME)
892 {
893 decl = SSA_NAME_VAR (decl);
894 if (!decl)
895 return false;
896 }
897
898 return (TREE_CODE (decl) == PARM_DECL);
899 }
900
901 /* Remap the dependence CLIQUE from the source to the destination function
902 as specified in ID. */
903
904 static unsigned short
905 remap_dependence_clique (copy_body_data *id, unsigned short clique)
906 {
907 if (clique == 0 || processing_debug_stmt)
908 return 0;
909 if (!id->dependence_map)
910 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
911 bool existed;
912 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
913 if (!existed)
914 {
915 /* Clique 1 is reserved for local ones set by PTA. */
916 if (cfun->last_clique == 0)
917 cfun->last_clique = 1;
918 newc = ++cfun->last_clique;
919 }
920 return newc;
921 }
922
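/* Illustrative sketch (the concrete numbers are hypothetical): if the
   source function uses dependence cliques 2 and 3 on its MEM_REFs,
   the first time each is seen here it gets a fresh clique in the
   destination function, e.g. 2 -> last_clique + 1 and
   3 -> last_clique + 2, while clique 1 stays reserved for local
   cliques set by PTA.  */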
923 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
924 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
925 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
926 recursing into the child nodes of *TP. */
927
928 static tree
929 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
930 {
931 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
932 copy_body_data *id = (copy_body_data *) wi_p->info;
933 tree fn = id->src_fn;
934
935 /* For recursive invocations this is no longer the LHS itself. */
936 bool is_lhs = wi_p->is_lhs;
937 wi_p->is_lhs = false;
938
939 if (TREE_CODE (*tp) == SSA_NAME)
940 {
941 *tp = remap_ssa_name (*tp, id);
942 *walk_subtrees = 0;
943 if (is_lhs)
944 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
945 return NULL;
946 }
947 else if (auto_var_in_fn_p (*tp, fn))
948 {
949 /* Local variables and labels need to be replaced by equivalent
950 variables. We don't want to copy static variables; there's
951 only one of those, no matter how many times we inline the
952 containing function. Similarly for globals from an outer
953 function. */
954 tree new_decl;
955
956 /* Remap the declaration. */
957 new_decl = remap_decl (*tp, id);
958 gcc_assert (new_decl);
959 /* Replace this variable with the copy. */
960 STRIP_TYPE_NOPS (new_decl);
961 /* ??? The C++ frontend uses void * pointer zero to initialize
962 any other type. This confuses the middle-end type verification.
963 As cloned bodies do not go through gimplification again the fixup
964 there doesn't trigger. */
965 if (TREE_CODE (new_decl) == INTEGER_CST
966 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
967 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
968 *tp = new_decl;
969 *walk_subtrees = 0;
970 }
971 else if (TREE_CODE (*tp) == STATEMENT_LIST)
972 gcc_unreachable ();
973 else if (TREE_CODE (*tp) == SAVE_EXPR)
974 gcc_unreachable ();
975 else if (TREE_CODE (*tp) == LABEL_DECL
976 && (!DECL_CONTEXT (*tp)
977 || decl_function_context (*tp) == id->src_fn))
978 /* These may need to be remapped for EH handling. */
979 *tp = remap_decl (*tp, id);
980 else if (TREE_CODE (*tp) == FIELD_DECL)
981 {
982 /* If the enclosing record type is variably_modified_type_p, the field
983 has already been remapped. Otherwise, it need not be. */
984 tree *n = id->decl_map->get (*tp);
985 if (n)
986 *tp = *n;
987 *walk_subtrees = 0;
988 }
989 else if (TYPE_P (*tp))
990 /* Types may need remapping as well. */
991 *tp = remap_type (*tp, id);
992 else if (CONSTANT_CLASS_P (*tp))
993 {
994 /* If this is a constant, we have to copy the node iff the type
995 will be remapped. copy_tree_r will not copy a constant. */
996 tree new_type = remap_type (TREE_TYPE (*tp), id);
997
998 if (new_type == TREE_TYPE (*tp))
999 *walk_subtrees = 0;
1000
1001 else if (TREE_CODE (*tp) == INTEGER_CST)
1002 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1003 else
1004 {
1005 *tp = copy_node (*tp);
1006 TREE_TYPE (*tp) = new_type;
1007 }
1008 }
1009 else
1010 {
1011 /* Otherwise, just copy the node. Note that copy_tree_r already
1012 knows not to copy VAR_DECLs, etc., so this is safe. */
1013
1014 if (TREE_CODE (*tp) == MEM_REF)
1015 {
1016 /* We need to re-canonicalize MEM_REFs from inline substitutions
1017 that can happen when a pointer argument is an ADDR_EXPR.
1018 Recurse here manually to allow that. */
1019 tree ptr = TREE_OPERAND (*tp, 0);
1020 tree type = remap_type (TREE_TYPE (*tp), id);
1021 tree old = *tp;
1022 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1023 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1024 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1025 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1026 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1027 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1028 {
1029 MR_DEPENDENCE_CLIQUE (*tp)
1030 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1031 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1032 }
1033 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1034 remapped a parameter as the property might be valid only
1035 for the parameter itself. */
1036 if (TREE_THIS_NOTRAP (old)
1037 && (!is_parm (TREE_OPERAND (old, 0))
1038 || (!id->transform_parameter && is_parm (ptr))))
1039 TREE_THIS_NOTRAP (*tp) = 1;
1040 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1041 *walk_subtrees = 0;
1042 return NULL;
1043 }
1044
1045 /* Here is the "usual case". Copy this tree node, and then
1046 tweak some special cases. */
1047 copy_tree_r (tp, walk_subtrees, NULL);
1048
1049 if (TREE_CODE (*tp) != OMP_CLAUSE)
1050 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1051
1052 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1053 {
1054 /* The copied TARGET_EXPR has never been expanded, even if the
1055 original node was expanded already. */
1056 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1057 TREE_OPERAND (*tp, 3) = NULL_TREE;
1058 }
1059 else if (TREE_CODE (*tp) == ADDR_EXPR)
1060 {
1061 /* Variable substitution need not be simple. In particular,
1062 the MEM_REF substitution above. Make sure that
1063 TREE_CONSTANT and friends are up-to-date. */
1064 int invariant = is_gimple_min_invariant (*tp);
1065 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1066 recompute_tree_invariant_for_addr_expr (*tp);
1067
1068 /* If this used to be invariant, but is not any longer,
1069 then regimplification is probably needed. */
1070 if (invariant && !is_gimple_min_invariant (*tp))
1071 id->regimplify = true;
1072
1073 *walk_subtrees = 0;
1074 }
1075 }
1076
1077 /* Update the TREE_BLOCK for the cloned expr. */
1078 if (EXPR_P (*tp))
1079 {
1080 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1081 tree old_block = TREE_BLOCK (*tp);
1082 if (old_block)
1083 {
1084 tree *n;
1085 n = id->decl_map->get (TREE_BLOCK (*tp));
1086 if (n)
1087 new_block = *n;
1088 }
1089 TREE_SET_BLOCK (*tp, new_block);
1090 }
1091
1092 /* Keep iterating. */
1093 return NULL_TREE;
1094 }
1095
1096
1097 /* Called via walk_tree from copy_tree_body and other remapping
1098 routines. DATA is really a `copy_body_data *'. */
1099
1100 tree
1101 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1102 {
1103 copy_body_data *id = (copy_body_data *) data;
1104 tree fn = id->src_fn;
1105 tree new_block;
1106
1107 /* Begin by recognizing trees that we'll completely rewrite for the
1108 inlining context. Our output for these trees is completely
1109 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1110 into an edge). Further down, we'll handle trees that get
1111 duplicated and/or tweaked. */
1112
1113 /* When requested, RETURN_EXPRs should be transformed to just the
1114 contained MODIFY_EXPR. The branch semantics of the return will
1115 be handled elsewhere by manipulating the CFG rather than a statement. */
1116 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1117 {
1118 tree assignment = TREE_OPERAND (*tp, 0);
1119
1120 /* If we're returning something, just turn that into an
1121 assignment into the equivalent of the original RESULT_DECL.
1122 If the "assignment" is just the result decl, the result
1123 decl has already been set (e.g. a recent "foo (&result_decl,
1124 ...)"); just toss the entire RETURN_EXPR. */
1125 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1126 {
1127 /* Replace the RETURN_EXPR with (a copy of) the
1128 MODIFY_EXPR hanging underneath. */
1129 *tp = copy_node (assignment);
1130 }
1131 else /* Else the RETURN_EXPR returns no value. */
1132 {
1133 *tp = NULL;
1134 return (tree) (void *)1;
1135 }
1136 }
1137 else if (TREE_CODE (*tp) == SSA_NAME)
1138 {
1139 *tp = remap_ssa_name (*tp, id);
1140 *walk_subtrees = 0;
1141 return NULL;
1142 }
1143
1144 /* Local variables and labels need to be replaced by equivalent
1145 variables. We don't want to copy static variables; there's only
1146 one of those, no matter how many times we inline the containing
1147 function. Similarly for globals from an outer function. */
1148 else if (auto_var_in_fn_p (*tp, fn))
1149 {
1150 tree new_decl;
1151
1152 /* Remap the declaration. */
1153 new_decl = remap_decl (*tp, id);
1154 gcc_assert (new_decl);
1155 /* Replace this variable with the copy. */
1156 STRIP_TYPE_NOPS (new_decl);
1157 *tp = new_decl;
1158 *walk_subtrees = 0;
1159 }
1160 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1161 copy_statement_list (tp);
1162 else if (TREE_CODE (*tp) == SAVE_EXPR
1163 || TREE_CODE (*tp) == TARGET_EXPR)
1164 remap_save_expr (tp, id->decl_map, walk_subtrees);
1165 else if (TREE_CODE (*tp) == LABEL_DECL
1166 && (! DECL_CONTEXT (*tp)
1167 || decl_function_context (*tp) == id->src_fn))
1168 /* These may need to be remapped for EH handling. */
1169 *tp = remap_decl (*tp, id);
1170 else if (TREE_CODE (*tp) == BIND_EXPR)
1171 copy_bind_expr (tp, walk_subtrees, id);
1172 /* Types may need remapping as well. */
1173 else if (TYPE_P (*tp))
1174 *tp = remap_type (*tp, id);
1175
1176 /* If this is a constant, we have to copy the node iff the type will be
1177 remapped. copy_tree_r will not copy a constant. */
1178 else if (CONSTANT_CLASS_P (*tp))
1179 {
1180 tree new_type = remap_type (TREE_TYPE (*tp), id);
1181
1182 if (new_type == TREE_TYPE (*tp))
1183 *walk_subtrees = 0;
1184
1185 else if (TREE_CODE (*tp) == INTEGER_CST)
1186 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1187 else
1188 {
1189 *tp = copy_node (*tp);
1190 TREE_TYPE (*tp) = new_type;
1191 }
1192 }
1193
1194 /* Otherwise, just copy the node. Note that copy_tree_r already
1195 knows not to copy VAR_DECLs, etc., so this is safe. */
1196 else
1197 {
1198 /* Here we handle trees that are not completely rewritten.
1199 First we detect some inlining-induced bogosities for
1200 discarding. */
1201 if (TREE_CODE (*tp) == MODIFY_EXPR
1202 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1203 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1204 {
1205 /* Some assignments VAR = VAR; don't generate any rtl code
1206 and thus don't count as variable modification. Avoid
1207 keeping bogosities like 0 = 0. */
1208 tree decl = TREE_OPERAND (*tp, 0), value;
1209 tree *n;
1210
1211 n = id->decl_map->get (decl);
1212 if (n)
1213 {
1214 value = *n;
1215 STRIP_TYPE_NOPS (value);
1216 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1217 {
1218 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1219 return copy_tree_body_r (tp, walk_subtrees, data);
1220 }
1221 }
1222 }
1223 else if (TREE_CODE (*tp) == INDIRECT_REF)
1224 {
1225 /* Get rid of *& from inline substitutions that can happen when a
1226 pointer argument is an ADDR_EXPR. */
1227 tree decl = TREE_OPERAND (*tp, 0);
1228 tree *n = id->decl_map->get (decl);
1229 if (n)
1230 {
1231 /* If we happen to get an ADDR_EXPR in n->value, strip
1232 it manually here as we'll eventually get ADDR_EXPRs
1233 which lie about their types pointed to. In this case
1234 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1235 but we absolutely rely on that. As fold_indirect_ref
1236 does other useful transformations, try that first, though. */
1237 tree type = TREE_TYPE (*tp);
1238 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1239 tree old = *tp;
1240 *tp = gimple_fold_indirect_ref (ptr);
1241 if (! *tp)
1242 {
1243 type = remap_type (type, id);
1244 if (TREE_CODE (ptr) == ADDR_EXPR)
1245 {
1246 *tp
1247 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1248 /* ??? We should either assert here or build
1249 a VIEW_CONVERT_EXPR instead of blindly leaking
1250 incompatible types to our IL. */
1251 if (! *tp)
1252 *tp = TREE_OPERAND (ptr, 0);
1253 }
1254 else
1255 {
1256 *tp = build1 (INDIRECT_REF, type, ptr);
1257 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1258 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1259 TREE_READONLY (*tp) = TREE_READONLY (old);
1260 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1261 have remapped a parameter as the property might be
1262 valid only for the parameter itself. */
1263 if (TREE_THIS_NOTRAP (old)
1264 && (!is_parm (TREE_OPERAND (old, 0))
1265 || (!id->transform_parameter && is_parm (ptr))))
1266 TREE_THIS_NOTRAP (*tp) = 1;
1267 }
1268 }
1269 *walk_subtrees = 0;
1270 return NULL;
1271 }
1272 }
1273 else if (TREE_CODE (*tp) == MEM_REF)
1274 {
1275 /* We need to re-canonicalize MEM_REFs from inline substitutions
1276 that can happen when a pointer argument is an ADDR_EXPR.
1277 Recurse here manually to allow that. */
1278 tree ptr = TREE_OPERAND (*tp, 0);
1279 tree type = remap_type (TREE_TYPE (*tp), id);
1280 tree old = *tp;
1281 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1282 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1283 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1284 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1285 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1286 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1287 {
1288 MR_DEPENDENCE_CLIQUE (*tp)
1289 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1290 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1291 }
1292 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1293 remapped a parameter as the property might be valid only
1294 for the parameter itself. */
1295 if (TREE_THIS_NOTRAP (old)
1296 && (!is_parm (TREE_OPERAND (old, 0))
1297 || (!id->transform_parameter && is_parm (ptr))))
1298 TREE_THIS_NOTRAP (*tp) = 1;
1299 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1300 *walk_subtrees = 0;
1301 return NULL;
1302 }
1303
1304 /* Here is the "usual case". Copy this tree node, and then
1305 tweak some special cases. */
1306 copy_tree_r (tp, walk_subtrees, NULL);
1307
1308 /* If EXPR has a block defined, map it to the newly constructed block.
1309 When inlining we want EXPRs without a block to appear in the block
1310 of the function call if we are not remapping a type. */
1311 if (EXPR_P (*tp))
1312 {
1313 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1314 if (TREE_BLOCK (*tp))
1315 {
1316 tree *n;
1317 n = id->decl_map->get (TREE_BLOCK (*tp));
1318 if (n)
1319 new_block = *n;
1320 }
1321 TREE_SET_BLOCK (*tp, new_block);
1322 }
1323
1324 if (TREE_CODE (*tp) != OMP_CLAUSE)
1325 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1326
1327 /* The copied TARGET_EXPR has never been expanded, even if the
1328 original node was expanded already. */
1329 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1330 {
1331 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1332 TREE_OPERAND (*tp, 3) = NULL_TREE;
1333 }
1334
1335 /* Variable substitution need not be simple. In particular, the
1336 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1337 and friends are up-to-date. */
1338 else if (TREE_CODE (*tp) == ADDR_EXPR)
1339 {
1340 int invariant = is_gimple_min_invariant (*tp);
1341 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1342
1343 /* Handle the case where we substituted an INDIRECT_REF
1344 into the operand of the ADDR_EXPR. */
1345 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1346 {
1347 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1348 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1349 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1350 *tp = t;
1351 }
1352 else
1353 recompute_tree_invariant_for_addr_expr (*tp);
1354
1355 /* If this used to be invariant, but is not any longer,
1356 then regimplification is probably needed. */
1357 if (invariant && !is_gimple_min_invariant (*tp))
1358 id->regimplify = true;
1359
1360 *walk_subtrees = 0;
1361 }
1362 }
1363
1364 /* Keep iterating. */
1365 return NULL_TREE;
1366 }
1367
1368 /* Helper for remap_gimple_stmt. Given an EH region number for the
1369 source function, map that to the duplicate EH region number in
1370 the destination function. */
1371
1372 static int
1373 remap_eh_region_nr (int old_nr, copy_body_data *id)
1374 {
1375 eh_region old_r, new_r;
1376
1377 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1378 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1379
1380 return new_r->index;
1381 }
1382
1383 /* Similar, but operate on INTEGER_CSTs. */
1384
1385 static tree
1386 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1387 {
1388 int old_nr, new_nr;
1389
1390 old_nr = tree_to_shwi (old_t_nr);
1391 new_nr = remap_eh_region_nr (old_nr, id);
1392
1393 return build_int_cst (integer_type_node, new_nr);
1394 }
1395
1396 /* Helper for copy_bb. Remap statement STMT using the inlining
1397 information in ID. Return the copied statement(s) as a sequence. */
1398
1399 static gimple_seq
1400 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1401 {
1402 gimple *copy = NULL;
1403 struct walk_stmt_info wi;
1404 bool skip_first = false;
1405 gimple_seq stmts = NULL;
1406
1407 if (is_gimple_debug (stmt)
1408 && (gimple_debug_nonbind_marker_p (stmt)
1409 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1410 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1411 return NULL;
1412
1413 /* Begin by recognizing trees that we'll completely rewrite for the
1414 inlining context. Our output for these trees is completely
1415 different from our input (e.g. RETURN_EXPR is deleted and morphs
1416 into an edge). Further down, we'll handle trees that get
1417 duplicated and/or tweaked. */
1418
1419 /* When requested, GIMPLE_RETURN should be transformed to just the
1420 contained GIMPLE_ASSIGN. The branch semantics of the return will
1421 be handled elsewhere by manipulating the CFG rather than the
1422 statement. */
1423 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1424 {
1425 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1426
1427 /* If we're returning something, just turn that into an
1428 assignment to the equivalent of the original RESULT_DECL.
1429 If RETVAL is just the result decl, the result decl has
1430 already been set (e.g. a recent "foo (&result_decl, ...)");
1431 just toss the entire GIMPLE_RETURN. */
1432 if (retval
1433 && (TREE_CODE (retval) != RESULT_DECL
1434 && (TREE_CODE (retval) != SSA_NAME
1435 || ! SSA_NAME_VAR (retval)
1436 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1437 {
1438 copy = gimple_build_assign (id->do_not_unshare
1439 ? id->retvar : unshare_expr (id->retvar),
1440 retval);
1441 /* id->retvar is already substituted. Skip it on later remapping. */
1442 skip_first = true;
1443 }
1444 else
1445 return NULL;
1446 }
1447 else if (gimple_has_substatements (stmt))
1448 {
1449 gimple_seq s1, s2;
1450
1451 /* When cloning bodies from the C++ front end, we will be handed bodies
1452 in High GIMPLE form. Handle here all the High GIMPLE statements that
1453 have embedded statements. */
1454 switch (gimple_code (stmt))
1455 {
1456 case GIMPLE_BIND:
1457 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1458 break;
1459
1460 case GIMPLE_CATCH:
1461 {
1462 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1463 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1464 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1465 }
1466 break;
1467
1468 case GIMPLE_EH_FILTER:
1469 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1470 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1471 break;
1472
1473 case GIMPLE_TRY:
1474 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1475 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1476 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1477 break;
1478
1479 case GIMPLE_WITH_CLEANUP_EXPR:
1480 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1481 copy = gimple_build_wce (s1);
1482 break;
1483
1484 case GIMPLE_OMP_PARALLEL:
1485 {
1486 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1487 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1488 copy = gimple_build_omp_parallel
1489 (s1,
1490 gimple_omp_parallel_clauses (omp_par_stmt),
1491 gimple_omp_parallel_child_fn (omp_par_stmt),
1492 gimple_omp_parallel_data_arg (omp_par_stmt));
1493 }
1494 break;
1495
1496 case GIMPLE_OMP_TASK:
1497 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1498 copy = gimple_build_omp_task
1499 (s1,
1500 gimple_omp_task_clauses (stmt),
1501 gimple_omp_task_child_fn (stmt),
1502 gimple_omp_task_data_arg (stmt),
1503 gimple_omp_task_copy_fn (stmt),
1504 gimple_omp_task_arg_size (stmt),
1505 gimple_omp_task_arg_align (stmt));
1506 break;
1507
1508 case GIMPLE_OMP_FOR:
1509 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1510 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1511 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1512 gimple_omp_for_clauses (stmt),
1513 gimple_omp_for_collapse (stmt), s2);
1514 {
1515 size_t i;
1516 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1517 {
1518 gimple_omp_for_set_index (copy, i,
1519 gimple_omp_for_index (stmt, i));
1520 gimple_omp_for_set_initial (copy, i,
1521 gimple_omp_for_initial (stmt, i));
1522 gimple_omp_for_set_final (copy, i,
1523 gimple_omp_for_final (stmt, i));
1524 gimple_omp_for_set_incr (copy, i,
1525 gimple_omp_for_incr (stmt, i));
1526 gimple_omp_for_set_cond (copy, i,
1527 gimple_omp_for_cond (stmt, i));
1528 }
1529 }
1530 break;
1531
1532 case GIMPLE_OMP_MASTER:
1533 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1534 copy = gimple_build_omp_master (s1);
1535 break;
1536
1537 case GIMPLE_OMP_TASKGROUP:
1538 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1539 copy = gimple_build_omp_taskgroup
1540 (s1, gimple_omp_taskgroup_clauses (stmt));
1541 break;
1542
1543 case GIMPLE_OMP_ORDERED:
1544 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1545 copy = gimple_build_omp_ordered
1546 (s1,
1547 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1548 break;
1549
1550 case GIMPLE_OMP_SECTION:
1551 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1552 copy = gimple_build_omp_section (s1);
1553 break;
1554
1555 case GIMPLE_OMP_SECTIONS:
1556 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1557 copy = gimple_build_omp_sections
1558 (s1, gimple_omp_sections_clauses (stmt));
1559 break;
1560
1561 case GIMPLE_OMP_SINGLE:
1562 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1563 copy = gimple_build_omp_single
1564 (s1, gimple_omp_single_clauses (stmt));
1565 break;
1566
1567 case GIMPLE_OMP_TARGET:
1568 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1569 copy = gimple_build_omp_target
1570 (s1, gimple_omp_target_kind (stmt),
1571 gimple_omp_target_clauses (stmt));
1572 break;
1573
1574 case GIMPLE_OMP_TEAMS:
1575 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1576 copy = gimple_build_omp_teams
1577 (s1, gimple_omp_teams_clauses (stmt));
1578 break;
1579
1580 case GIMPLE_OMP_CRITICAL:
1581 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1582 copy = gimple_build_omp_critical (s1,
1583 gimple_omp_critical_name
1584 (as_a <gomp_critical *> (stmt)),
1585 gimple_omp_critical_clauses
1586 (as_a <gomp_critical *> (stmt)));
1587 break;
1588
1589 case GIMPLE_TRANSACTION:
1590 {
1591 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1592 gtransaction *new_trans_stmt;
1593 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1594 id);
1595 copy = new_trans_stmt = gimple_build_transaction (s1);
1596 gimple_transaction_set_subcode (new_trans_stmt,
1597 gimple_transaction_subcode (old_trans_stmt));
1598 gimple_transaction_set_label_norm (new_trans_stmt,
1599 gimple_transaction_label_norm (old_trans_stmt));
1600 gimple_transaction_set_label_uninst (new_trans_stmt,
1601 gimple_transaction_label_uninst (old_trans_stmt));
1602 gimple_transaction_set_label_over (new_trans_stmt,
1603 gimple_transaction_label_over (old_trans_stmt));
1604 }
1605 break;
1606
1607 default:
1608 gcc_unreachable ();
1609 }
1610 }
1611 else
1612 {
1613 if (gimple_assign_copy_p (stmt)
1614 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1615 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1616 {
1617 /* Here we handle statements that are not completely rewritten.
1618 First we detect some inlining-induced bogosities for
1619 discarding. */
1620
1621 /* Some assignments VAR = VAR; don't generate any rtl code
1622 and thus don't count as variable modification. Avoid
1623 keeping bogosities like 0 = 0. */
1624 tree decl = gimple_assign_lhs (stmt), value;
1625 tree *n;
1626
1627 n = id->decl_map->get (decl);
1628 if (n)
1629 {
1630 value = *n;
1631 STRIP_TYPE_NOPS (value);
1632 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1633 return NULL;
1634 }
1635 }
1636
1637 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1638 in a block that we aren't copying during tree_function_versioning,
1639 just drop the clobber stmt. */
1640 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1641 {
1642 tree lhs = gimple_assign_lhs (stmt);
1643 if (TREE_CODE (lhs) == MEM_REF
1644 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1645 {
1646 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1647 if (gimple_bb (def_stmt)
1648 && !bitmap_bit_p (id->blocks_to_copy,
1649 gimple_bb (def_stmt)->index))
1650 return NULL;
1651 }
1652 }
1653
1654 if (gimple_debug_bind_p (stmt))
1655 {
1656 gdebug *copy
1657 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1658 gimple_debug_bind_get_value (stmt),
1659 stmt);
1660 if (id->reset_location)
1661 gimple_set_location (copy, input_location);
1662 id->debug_stmts.safe_push (copy);
1663 gimple_seq_add_stmt (&stmts, copy);
1664 return stmts;
1665 }
1666 if (gimple_debug_source_bind_p (stmt))
1667 {
1668 gdebug *copy = gimple_build_debug_source_bind
1669 (gimple_debug_source_bind_get_var (stmt),
1670 gimple_debug_source_bind_get_value (stmt),
1671 stmt);
1672 if (id->reset_location)
1673 gimple_set_location (copy, input_location);
1674 id->debug_stmts.safe_push (copy);
1675 gimple_seq_add_stmt (&stmts, copy);
1676 return stmts;
1677 }
1678 if (gimple_debug_nonbind_marker_p (stmt))
1679 {
1680 /* If the inlined function has too many debug markers,
1681 don't copy them. */
1682 if (id->src_cfun->debug_marker_count
1683 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1684 return stmts;
1685
1686 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1687 if (id->reset_location)
1688 gimple_set_location (copy, input_location);
1689 id->debug_stmts.safe_push (copy);
1690 gimple_seq_add_stmt (&stmts, copy);
1691 return stmts;
1692 }
1693
1694 /* Create a new deep copy of the statement. */
1695 copy = gimple_copy (stmt);
1696
1697 /* Clear flags that need revisiting. */
1698 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1699 {
1700 if (gimple_call_tail_p (call_stmt))
1701 gimple_call_set_tail (call_stmt, false);
1702 if (gimple_call_from_thunk_p (call_stmt))
1703 gimple_call_set_from_thunk (call_stmt, false);
1704 if (gimple_call_internal_p (call_stmt))
1705 switch (gimple_call_internal_fn (call_stmt))
1706 {
1707 case IFN_GOMP_SIMD_LANE:
1708 case IFN_GOMP_SIMD_VF:
1709 case IFN_GOMP_SIMD_LAST_LANE:
1710 case IFN_GOMP_SIMD_ORDERED_START:
1711 case IFN_GOMP_SIMD_ORDERED_END:
1712 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1713 break;
1714 default:
1715 break;
1716 }
1717 }
1718
1719 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1720 RESX and EH_DISPATCH. */
1721 if (id->eh_map)
1722 switch (gimple_code (copy))
1723 {
1724 case GIMPLE_CALL:
1725 {
1726 tree r, fndecl = gimple_call_fndecl (copy);
1727 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1728 switch (DECL_FUNCTION_CODE (fndecl))
1729 {
1730 case BUILT_IN_EH_COPY_VALUES:
1731 r = gimple_call_arg (copy, 1);
1732 r = remap_eh_region_tree_nr (r, id);
1733 gimple_call_set_arg (copy, 1, r);
1734 /* FALLTHRU */
1735
1736 case BUILT_IN_EH_POINTER:
1737 case BUILT_IN_EH_FILTER:
1738 r = gimple_call_arg (copy, 0);
1739 r = remap_eh_region_tree_nr (r, id);
1740 gimple_call_set_arg (copy, 0, r);
1741 break;
1742
1743 default:
1744 break;
1745 }
1746
1747 /* Reset alias info if we didn't apply measures to
1748 keep it valid over inlining by setting DECL_PT_UID. */
1749 if (!id->src_cfun->gimple_df
1750 || !id->src_cfun->gimple_df->ipa_pta)
1751 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1752 }
1753 break;
1754
1755 case GIMPLE_RESX:
1756 {
1757 gresx *resx_stmt = as_a <gresx *> (copy);
1758 int r = gimple_resx_region (resx_stmt);
1759 r = remap_eh_region_nr (r, id);
1760 gimple_resx_set_region (resx_stmt, r);
1761 }
1762 break;
1763
1764 case GIMPLE_EH_DISPATCH:
1765 {
1766 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1767 int r = gimple_eh_dispatch_region (eh_dispatch);
1768 r = remap_eh_region_nr (r, id);
1769 gimple_eh_dispatch_set_region (eh_dispatch, r);
1770 }
1771 break;
1772
1773 default:
1774 break;
1775 }
1776 }
1777
1778 /* If STMT has a block defined, map it to the newly constructed block. */
1779 if (gimple_block (copy))
1780 {
1781 tree *n;
1782 n = id->decl_map->get (gimple_block (copy));
1783 gcc_assert (n);
1784 gimple_set_block (copy, *n);
1785 }
1786
1787 if (id->reset_location)
1788 gimple_set_location (copy, input_location);
1789
1790 /* Debug statements ought to be rebuilt and not copied. */
1791 gcc_checking_assert (!is_gimple_debug (copy));
1792
1793 /* Remap all the operands in COPY. */
1794 memset (&wi, 0, sizeof (wi));
1795 wi.info = id;
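/* When SKIP_FIRST is set, operand 0 of the copied assignment already
   refers to the substituted return variable and must not be remapped
   again; only walk the RHS in that case.  */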
1796 if (skip_first)
1797 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1798 else
1799 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1800
1801 /* Clear the copied virtual operands. We are not remapping them here
1802 but are going to recreate them from scratch. */
1803 if (gimple_has_mem_ops (copy))
1804 {
1805 gimple_set_vdef (copy, NULL_TREE);
1806 gimple_set_vuse (copy, NULL_TREE);
1807 }
1808
1809 gimple_seq_add_stmt (&stmts, copy);
1810 return stmts;
1811 }
1812
1813
1814 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1815 later */
1816
1817 static basic_block
1818 copy_bb (copy_body_data *id, basic_block bb,
1819 profile_count num, profile_count den)
1820 {
1821 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1822 basic_block copy_basic_block;
1823 tree decl;
1824 basic_block prev;
1825
1826 profile_count::adjust_for_ipa_scaling (&num, &den);
1827
1828 /* Search for previous copied basic block. */
1829 prev = bb->prev_bb;
1830 while (!prev->aux)
1831 prev = prev->prev_bb;
1832
1833 /* create_basic_block() will append every new block to
1834 basic_block_info automatically. */
1835 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1836 copy_basic_block->count = bb->count.apply_scale (num, den);
1837
1838 copy_gsi = gsi_start_bb (copy_basic_block);
1839
1840 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1841 {
1842 gimple_seq stmts;
1843 gimple *stmt = gsi_stmt (gsi);
1844 gimple *orig_stmt = stmt;
1845 gimple_stmt_iterator stmts_gsi;
1846 bool stmt_added = false;
1847
1848 id->regimplify = false;
1849 stmts = remap_gimple_stmt (stmt, id);
1850
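/* remap_gimple_stmt returns an empty sequence when the statement is to
   be dropped, e.g. assignments that are no-ops after remapping, clobbers
   of blocks we do not copy, or debug markers over the limit.  */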
1851 if (gimple_seq_empty_p (stmts))
1852 continue;
1853
1854 seq_gsi = copy_gsi;
1855
1856 for (stmts_gsi = gsi_start (stmts);
1857 !gsi_end_p (stmts_gsi); )
1858 {
1859 stmt = gsi_stmt (stmts_gsi);
1860
1861 /* Advance iterator now before stmt is moved to seq_gsi. */
1862 gsi_next (&stmts_gsi);
1863
1864 if (gimple_nop_p (stmt))
1865 continue;
1866
1867 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1868 orig_stmt);
1869
1870 /* With return slot optimization we can end up with
1871 non-gimple (foo *)&this->m, fix that here. */
1872 if (is_gimple_assign (stmt)
1873 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1874 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1875 {
1876 tree new_rhs;
1877 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1878 gimple_assign_rhs1 (stmt),
1879 true, NULL, false,
1880 GSI_CONTINUE_LINKING);
1881 gimple_assign_set_rhs1 (stmt, new_rhs);
1882 id->regimplify = false;
1883 }
1884
1885 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1886
1887 if (id->regimplify)
1888 gimple_regimplify_operands (stmt, &seq_gsi);
1889
1890 stmt_added = true;
1891 }
1892
1893 if (!stmt_added)
1894 continue;
1895
1896 /* If copy_basic_block has been empty at the start of this iteration,
1897 call gsi_start_bb again to get at the newly added statements. */
1898 if (gsi_end_p (copy_gsi))
1899 copy_gsi = gsi_start_bb (copy_basic_block);
1900 else
1901 gsi_next (&copy_gsi);
1902
1903 /* Process the new statement. The call to gimple_regimplify_operands
1904 possibly turned the statement into multiple statements, we
1905 need to process all of them. */
1906 do
1907 {
1908 tree fn;
1909 gcall *call_stmt;
1910
1911 stmt = gsi_stmt (copy_gsi);
1912 call_stmt = dyn_cast <gcall *> (stmt);
1913 if (call_stmt
1914 && gimple_call_va_arg_pack_p (call_stmt)
1915 && id->call_stmt
1916 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1917 {
1918 /* __builtin_va_arg_pack () should be replaced by
1919 all arguments corresponding to ... in the caller. */
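/* E.g. if the inlined function is declared foo (int x, ...) and the call
   being inlined is foo (1, 2, 3), then a call bar (7, __builtin_va_arg_pack ())
   in its body becomes bar (7, 2, 3).  */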
1920 tree p;
1921 gcall *new_call;
1922 vec<tree> argarray;
1923 size_t nargs = gimple_call_num_args (id->call_stmt);
1924 size_t n;
1925
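/* NARGS starts as the caller's total argument count; subtracting one per
   named parameter of the inlined function leaves the number of arguments
   that were passed through '...'.  */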
1926 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1927 nargs--;
1928
1929 /* Create the new array of arguments. */
1930 n = nargs + gimple_call_num_args (call_stmt);
1931 argarray.create (n);
1932 argarray.safe_grow_cleared (n);
1933
1934 /* Copy all the arguments before '...' */
1935 memcpy (argarray.address (),
1936 gimple_call_arg_ptr (call_stmt, 0),
1937 gimple_call_num_args (call_stmt) * sizeof (tree));
1938
1939 /* Append the arguments passed in '...' */
1940 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1941 gimple_call_arg_ptr (id->call_stmt, 0)
1942 + (gimple_call_num_args (id->call_stmt) - nargs),
1943 nargs * sizeof (tree));
1944
1945 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1946 argarray);
1947
1948 argarray.release ();
1949
1950 /* Copy all GIMPLE_CALL flags, location and block, except
1951 GF_CALL_VA_ARG_PACK. */
1952 gimple_call_copy_flags (new_call, call_stmt);
1953 gimple_call_set_va_arg_pack (new_call, false);
1954 gimple_set_location (new_call, gimple_location (stmt));
1955 gimple_set_block (new_call, gimple_block (stmt));
1956 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1957
1958 gsi_replace (&copy_gsi, new_call, false);
1959 stmt = new_call;
1960 }
1961 else if (call_stmt
1962 && id->call_stmt
1963 && (decl = gimple_call_fndecl (stmt))
1964 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
1965 {
1966 /* __builtin_va_arg_pack_len () should be replaced by
1967 the number of anonymous arguments. */
1968 size_t nargs = gimple_call_num_args (id->call_stmt);
1969 tree count, p;
1970 gimple *new_stmt;
1971
1972 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1973 nargs--;
1974
1975 if (!gimple_call_lhs (stmt))
1976 {
1977 /* Drop unused calls. */
1978 gsi_remove (&copy_gsi, false);
1979 continue;
1980 }
1981 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
1982 {
1983 count = build_int_cst (integer_type_node, nargs);
1984 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1985 gsi_replace (&copy_gsi, new_stmt, false);
1986 stmt = new_stmt;
1987 }
1988 else if (nargs != 0)
1989 {
1990 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
1991 count = build_int_cst (integer_type_node, nargs);
1992 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1993 PLUS_EXPR, newlhs, count);
1994 gimple_call_set_lhs (stmt, newlhs);
1995 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
1996 }
1997 }
1998 else if (call_stmt
1999 && id->call_stmt
2000 && gimple_call_internal_p (stmt)
2001 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2002 {
2003 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2004 gsi_remove (&copy_gsi, false);
2005 continue;
2006 }
2007
2008 /* Statements produced by inlining can be unfolded, especially
2009 when we constant propagated some operands. We can't fold
2010 them right now for two reasons:
2011 1) folding requires SSA_NAME_DEF_STMTs to be correct
2012 2) we can't change function calls to builtins.
2013 So we just mark statements for later folding. We mark
2014 all new statements, instead of just the statements that have changed
2015 by some nontrivial substitution so even statements made
2016 foldable indirectly are updated. If this turns out to be
2017 expensive, copy_body can be told to watch for nontrivial
2018 changes. */
2019 if (id->statements_to_fold)
2020 id->statements_to_fold->add (stmt);
2021
2022 /* We're duplicating a CALL_EXPR. Find any corresponding
2023 callgraph edges and update or duplicate them. */
2024 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2025 {
2026 struct cgraph_edge *edge;
2027
2028 switch (id->transform_call_graph_edges)
2029 {
2030 case CB_CGE_DUPLICATE:
2031 edge = id->src_node->get_edge (orig_stmt);
2032 if (edge)
2033 {
2034 struct cgraph_edge *old_edge = edge;
2035 profile_count old_cnt = edge->count;
2036 edge = edge->clone (id->dst_node, call_stmt,
2037 gimple_uid (stmt),
2038 num, den,
2039 true);
2040
2041 /* Speculative calls consist of two edges - direct and
2042 indirect. Duplicate the whole thing and distribute
2043 frequencies accordingly. */
2044 if (edge->speculative)
2045 {
2046 struct cgraph_edge *direct, *indirect;
2047 struct ipa_ref *ref;
2048
2049 gcc_assert (!edge->indirect_unknown_callee);
2050 old_edge->speculative_call_info (direct, indirect, ref);
2051
2052 profile_count indir_cnt = indirect->count;
2053 indirect = indirect->clone (id->dst_node, call_stmt,
2054 gimple_uid (stmt),
2055 num, den,
2056 true);
2057
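/* Split the count of the copied block between the two cloned edges in
   the same proportion the original direct and indirect counts had.  */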
2058 profile_probability prob
2059 = indir_cnt.probability_in (old_cnt + indir_cnt);
2060 indirect->count
2061 = copy_basic_block->count.apply_probability (prob);
2062 edge->count = copy_basic_block->count - indirect->count;
2063 id->dst_node->clone_reference (ref, stmt);
2064 }
2065 else
2066 edge->count = copy_basic_block->count;
2067 }
2068 break;
2069
2070 case CB_CGE_MOVE_CLONES:
2071 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2072 call_stmt);
2073 edge = id->dst_node->get_edge (stmt);
2074 break;
2075
2076 case CB_CGE_MOVE:
2077 edge = id->dst_node->get_edge (orig_stmt);
2078 if (edge)
2079 edge->set_call_stmt (call_stmt);
2080 break;
2081
2082 default:
2083 gcc_unreachable ();
2084 }
2085
2086 /* Constant propagation on arguments done during inlining
2087 may create a new direct call. Produce an edge for it. */
2088 if ((!edge
2089 || (edge->indirect_inlining_edge
2090 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2091 && id->dst_node->definition
2092 && (fn = gimple_call_fndecl (stmt)) != NULL)
2093 {
2094 struct cgraph_node *dest = cgraph_node::get_create (fn);
2095
2096 /* We have a missing edge in the callgraph. This can happen
2097 when previous inlining turned an indirect call into a
2098 direct call by constant propagating arguments or we are
2099 producing a dead clone (for further cloning). In all
2100 other cases we hit a bug (incorrect node sharing is the
2101 most common reason for missing edges). */
2102 gcc_assert (!dest->definition
2103 || dest->address_taken
2104 || !id->src_node->definition
2105 || !id->dst_node->definition);
2106 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2107 id->dst_node->create_edge_including_clones
2108 (dest, orig_stmt, call_stmt, bb->count,
2109 CIF_ORIGINALLY_INDIRECT_CALL);
2110 else
2111 id->dst_node->create_edge (dest, call_stmt,
2112 bb->count)->inline_failed
2113 = CIF_ORIGINALLY_INDIRECT_CALL;
2114 if (dump_file)
2115 {
2116 fprintf (dump_file, "Created new direct edge to %s\n",
2117 dest->name ());
2118 }
2119 }
2120
2121 notice_special_calls (as_a <gcall *> (stmt));
2122 }
2123
2124 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2125 id->eh_map, id->eh_lp_nr);
2126
2127 gsi_next (&copy_gsi);
2128 }
2129 while (!gsi_end_p (copy_gsi));
2130
2131 copy_gsi = gsi_last_bb (copy_basic_block);
2132 }
2133
2134 return copy_basic_block;
2135 }
2136
2137 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2138 form is quite easy, since dominator relationship for old basic blocks does
2139 not change.
2140
2141 There is, however, an exception: inlining might change the dominator relation
2142 across EH edges from basic blocks within the inlined function that lead
2143 to landing pads in the function we inline into.
2144
2145 The function fills in PHI_RESULTs of such PHI nodes if they refer
2146 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2147 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2148 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2149 set, and this means that there will be no overlapping live ranges
2150 for the underlying symbol.
2151
2152 This might change in the future if we allow redirecting of EH edges and
2153 we might want to change the way we build the CFG pre-inlining to include
2154 all the possible edges then. */
2155 static void
2156 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2157 bool can_throw, bool nonlocal_goto)
2158 {
2159 edge e;
2160 edge_iterator ei;
2161
2162 FOR_EACH_EDGE (e, ei, bb->succs)
2163 if (!e->dest->aux
2164 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2165 {
2166 gphi *phi;
2167 gphi_iterator si;
2168
2169 if (!nonlocal_goto)
2170 gcc_assert (e->flags & EDGE_EH);
2171
2172 if (!can_throw)
2173 gcc_assert (!(e->flags & EDGE_EH));
2174
2175 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2176 {
2177 edge re;
2178
2179 phi = si.phi ();
2180
2181 /* For abnormal goto/call edges the receiver can be the
2182 ENTRY_BLOCK. Do not assert this cannot happen. */
2183
2184 gcc_assert ((e->flags & EDGE_EH)
2185 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2186
2187 re = find_edge (ret_bb, e->dest);
2188 gcc_checking_assert (re);
2189 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2190 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2191
2192 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2193 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2194 }
2195 }
2196 }
2197
2198 /* Insert clobbers for automatic variables of inlined ID->src_fn
2199 function at the start of basic block BB. */
2200
2201 static void
2202 add_clobbers_to_eh_landing_pad (basic_block bb, copy_body_data *id)
2203 {
2204 tree var;
2205 unsigned int i;
2206 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2207 if (VAR_P (var)
2208 && !DECL_HARD_REGISTER (var)
2209 && !TREE_THIS_VOLATILE (var)
2210 && !DECL_HAS_VALUE_EXPR_P (var)
2211 && !is_gimple_reg (var)
2212 && auto_var_in_fn_p (var, id->src_fn)
2213 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2214 {
2215 tree *t = id->decl_map->get (var);
2216 if (!t)
2217 continue;
2218 tree new_var = *t;
2219 if (VAR_P (new_var)
2220 && !DECL_HARD_REGISTER (new_var)
2221 && !TREE_THIS_VOLATILE (new_var)
2222 && !DECL_HAS_VALUE_EXPR_P (new_var)
2223 && !is_gimple_reg (new_var)
2224 && auto_var_in_fn_p (new_var, id->dst_fn))
2225 {
2226 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2227 tree clobber = build_clobber (TREE_TYPE (new_var));
2228 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2229 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2230 }
2231 }
2232 }
2233
2234 /* Copy edges from BB into its copy constructed earlier, scale profile
2235 accordingly. Edges will be taken care of later. Assume aux
2236 pointers point to the copies of each BB. Return true if any
2237 debug stmts are left after a statement that must end the basic block. */
2238
2239 static bool
2240 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2241 basic_block ret_bb, basic_block abnormal_goto_dest,
2242 copy_body_data *id)
2243 {
2244 basic_block new_bb = (basic_block) bb->aux;
2245 edge_iterator ei;
2246 edge old_edge;
2247 gimple_stmt_iterator si;
2248 bool need_debug_cleanup = false;
2249
2250 /* Use the indices from the original blocks to create edges for the
2251 new ones. */
2252 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2253 if (!(old_edge->flags & EDGE_EH))
2254 {
2255 edge new_edge;
2256 int flags = old_edge->flags;
2257 location_t locus = old_edge->goto_locus;
2258
2259 /* Return edges do get a FALLTHRU flag when they get inlined. */
2260 if (old_edge->dest->index == EXIT_BLOCK
2261 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2262 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2263 flags |= EDGE_FALLTHRU;
2264
2265 new_edge
2266 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2267 new_edge->probability = old_edge->probability;
2268 if (!id->reset_location)
2269 new_edge->goto_locus = remap_location (locus, id);
2270 }
2271
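/* The entry and exit blocks contain no statements, so there is nothing
   further to do for them here.  */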
2272 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2273 return false;
2274
2275 /* When doing function splitting, we must decrease the count of the return block
2276 which was previously reachable from blocks we did not copy. */
2277 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2278 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2279 if (old_edge->src->index != ENTRY_BLOCK
2280 && !old_edge->src->aux)
2281 new_bb->count -= old_edge->count ().apply_scale (num, den);
2282
2283 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2284 {
2285 gimple *copy_stmt;
2286 bool can_throw, nonlocal_goto;
2287
2288 copy_stmt = gsi_stmt (si);
2289 if (!is_gimple_debug (copy_stmt))
2290 update_stmt (copy_stmt);
2291
2292 /* Do this before the possible split_block. */
2293 gsi_next (&si);
2294
2295 /* If this tree could throw an exception, there are two
2296 cases where we need to add abnormal edge(s): the
2297 tree wasn't in a region and there is a "current
2298 region" in the caller; or the original tree had
2299 EH edges. In both cases split the block after the tree,
2300 and add abnormal edge(s) as needed; we need both
2301 those from the callee and the caller.
2302 We check whether the copy can throw, because the const
2303 propagation can change an INDIRECT_REF which throws
2304 into a COMPONENT_REF which doesn't. If the copy
2305 can throw, the original could also throw. */
2306 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2307 nonlocal_goto
2308 = (stmt_can_make_abnormal_goto (copy_stmt)
2309 && !computed_goto_p (copy_stmt));
2310
2311 if (can_throw || nonlocal_goto)
2312 {
2313 if (!gsi_end_p (si))
2314 {
2315 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2316 gsi_next (&si);
2317 if (gsi_end_p (si))
2318 need_debug_cleanup = true;
2319 }
2320 if (!gsi_end_p (si))
2321 /* Note that bb's predecessor edges aren't necessarily
2322 right at this point; split_block doesn't care. */
2323 {
2324 edge e = split_block (new_bb, copy_stmt);
2325
2326 new_bb = e->dest;
2327 new_bb->aux = e->src->aux;
2328 si = gsi_start_bb (new_bb);
2329 }
2330 }
2331
2332 bool update_probs = false;
2333
2334 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2335 {
2336 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2337 update_probs = true;
2338 }
2339 else if (can_throw)
2340 {
2341 make_eh_edges (copy_stmt);
2342 update_probs = true;
2343 }
2344
2345 /* EH edges may not match old edges. Copy as much as possible. */
2346 if (update_probs)
2347 {
2348 edge e;
2349 edge_iterator ei;
2350 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2351
2352 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2353 if ((old_edge->flags & EDGE_EH)
2354 && (e = find_edge (copy_stmt_bb,
2355 (basic_block) old_edge->dest->aux))
2356 && (e->flags & EDGE_EH))
2357 e->probability = old_edge->probability;
2358
2359 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2360 if (e->flags & EDGE_EH)
2361 {
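/* EH edges that have no counterpart in the original block got no
   probability copied above; assume they are never taken.  */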
2362 if (!e->probability.initialized_p ())
2363 e->probability = profile_probability::never ();
2364 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2365 {
2366 add_clobbers_to_eh_landing_pad (e->dest, id);
2367 id->add_clobbers_to_eh_landing_pads = 0;
2368 }
2369 }
2370 }
2371
2372
2373 /* If the call we inline cannot make abnormal goto do not add
2374 additional abnormal edges but only retain those already present
2375 in the original function body. */
2376 if (abnormal_goto_dest == NULL)
2377 nonlocal_goto = false;
2378 if (nonlocal_goto)
2379 {
2380 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2381
2382 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2383 nonlocal_goto = false;
2384 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2385 in OpenMP regions which aren't allowed to be left abnormally.
2386 So, no need to add abnormal edge in that case. */
2387 else if (is_gimple_call (copy_stmt)
2388 && gimple_call_internal_p (copy_stmt)
2389 && (gimple_call_internal_fn (copy_stmt)
2390 == IFN_ABNORMAL_DISPATCHER)
2391 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2392 nonlocal_goto = false;
2393 else
2394 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2395 EDGE_ABNORMAL);
2396 }
2397
2398 if ((can_throw || nonlocal_goto)
2399 && gimple_in_ssa_p (cfun))
2400 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2401 can_throw, nonlocal_goto);
2402 }
2403 return need_debug_cleanup;
2404 }
2405
2406 /* Copy the PHIs. All blocks and edges are copied, some blocks
2407 were possibly split and new outgoing EH edges inserted.
2408 BB points to the block of the original function and AUX pointers link
2409 the original and newly copied blocks. */
2410
2411 static void
2412 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2413 {
2414 basic_block const new_bb = (basic_block) bb->aux;
2415 edge_iterator ei;
2416 gphi *phi;
2417 gphi_iterator si;
2418 edge new_edge;
2419 bool inserted = false;
2420
2421 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2422 {
2423 tree res, new_res;
2424 gphi *new_phi;
2425
2426 phi = si.phi ();
2427 res = PHI_RESULT (phi);
2428 new_res = res;
2429 if (!virtual_operand_p (res))
2430 {
2431 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2432 if (EDGE_COUNT (new_bb->preds) == 0)
2433 {
2434 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2435 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2436 }
2437 else
2438 {
2439 new_phi = create_phi_node (new_res, new_bb);
2440 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2441 {
2442 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2443 bb);
2444 tree arg;
2445 tree new_arg;
2446 edge_iterator ei2;
2447 location_t locus;
2448
2449 /* When doing partial cloning, we allow PHIs on the entry
2450 block as long as all the arguments are the same.
2451 Find any input edge to see which argument to copy. */
2452 if (!old_edge)
2453 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2454 if (!old_edge->src->aux)
2455 break;
2456
2457 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2458 new_arg = arg;
2459 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2460 gcc_assert (new_arg);
2461 /* With return slot optimization we can end up with
2462 non-gimple (foo *)&this->m, fix that here. */
2463 if (TREE_CODE (new_arg) != SSA_NAME
2464 && TREE_CODE (new_arg) != FUNCTION_DECL
2465 && !is_gimple_val (new_arg))
2466 {
2467 gimple_seq stmts = NULL;
2468 new_arg = force_gimple_operand (new_arg, &stmts, true,
2469 NULL);
2470 gsi_insert_seq_on_edge (new_edge, stmts);
2471 inserted = true;
2472 }
2473 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2474 if (id->reset_location)
2475 locus = input_location;
2476 else
2477 locus = remap_location (locus, id);
2478 add_phi_arg (new_phi, new_arg, new_edge, locus);
2479 }
2480 }
2481 }
2482 }
2483
2484 /* Commit the delayed edge insertions. */
2485 if (inserted)
2486 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2487 gsi_commit_one_edge_insert (new_edge, NULL);
2488 }
2489
2490
2491 /* Wrapper for remap_decl so it can be used as a callback. */
2492
2493 static tree
2494 remap_decl_1 (tree decl, void *data)
2495 {
2496 return remap_decl (decl, (copy_body_data *) data);
2497 }
2498
2499 /* Build struct function and associated datastructures for the new clone
2500 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2501 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2502
2503 static void
2504 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2505 {
2506 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2507
2508 if (!DECL_ARGUMENTS (new_fndecl))
2509 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2510 if (!DECL_RESULT (new_fndecl))
2511 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2512
2513 /* Register specific tree functions. */
2514 gimple_register_cfg_hooks ();
2515
2516 /* Get clean struct function. */
2517 push_struct_function (new_fndecl);
2518
2519 /* We will rebuild these, so just sanity check that they are empty. */
2520 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2521 gcc_assert (cfun->local_decls == NULL);
2522 gcc_assert (cfun->cfg == NULL);
2523 gcc_assert (cfun->decl == new_fndecl);
2524
2525 /* Copy items we preserve during cloning. */
2526 cfun->static_chain_decl = src_cfun->static_chain_decl;
2527 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2528 cfun->function_end_locus = src_cfun->function_end_locus;
2529 cfun->curr_properties = src_cfun->curr_properties;
2530 cfun->last_verified = src_cfun->last_verified;
2531 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2532 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2533 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2534 cfun->stdarg = src_cfun->stdarg;
2535 cfun->after_inlining = src_cfun->after_inlining;
2536 cfun->can_throw_non_call_exceptions
2537 = src_cfun->can_throw_non_call_exceptions;
2538 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2539 cfun->returns_struct = src_cfun->returns_struct;
2540 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2541
2542 init_empty_tree_cfg ();
2543
2544 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2545
2546 profile_count num = count;
2547 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2548 profile_count::adjust_for_ipa_scaling (&num, &den);
2549
2550 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2551 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2552 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2553 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2554 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2555 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2556 if (src_cfun->eh)
2557 init_eh_for_function ();
2558
2559 if (src_cfun->gimple_df)
2560 {
2561 init_tree_ssa (cfun);
2562 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2563 if (cfun->gimple_df->in_ssa_p)
2564 init_ssa_operands (cfun);
2565 }
2566 }
2567
2568 /* Helper function for copy_cfg_body. Move debug stmts from the end
2569 of NEW_BB to the beginning of successor basic blocks when needed. If the
2570 successor has multiple predecessors, reset the debug stmts' values,
2571 otherwise keep them. */
2572
2573 static void
2574 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2575 {
2576 edge e;
2577 edge_iterator ei;
2578 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2579
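/* There is something to move only when the last non-debug statement must
   end the block (it can throw or make an abnormal goto) and debug
   statements follow it.  */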
2580 if (gsi_end_p (si)
2581 || gsi_one_before_end_p (si)
2582 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2583 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2584 return;
2585
2586 FOR_EACH_EDGE (e, ei, new_bb->succs)
2587 {
2588 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2589 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2590 while (is_gimple_debug (gsi_stmt (ssi)))
2591 {
2592 gimple *stmt = gsi_stmt (ssi);
2593 gdebug *new_stmt;
2594 tree var;
2595 tree value;
2596
2597 /* For the last edge move the debug stmts instead of copying
2598 them. */
2599 if (ei_one_before_end_p (ei))
2600 {
2601 si = ssi;
2602 gsi_prev (&ssi);
2603 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2604 {
2605 gimple_debug_bind_reset_value (stmt);
2606 gimple_set_location (stmt, UNKNOWN_LOCATION);
2607 }
2608 gsi_remove (&si, false);
2609 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2610 continue;
2611 }
2612
2613 if (gimple_debug_bind_p (stmt))
2614 {
2615 var = gimple_debug_bind_get_var (stmt);
2616 if (single_pred_p (e->dest))
2617 {
2618 value = gimple_debug_bind_get_value (stmt);
2619 value = unshare_expr (value);
2620 new_stmt = gimple_build_debug_bind (var, value, stmt);
2621 }
2622 else
2623 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2624 }
2625 else if (gimple_debug_source_bind_p (stmt))
2626 {
2627 var = gimple_debug_source_bind_get_var (stmt);
2628 value = gimple_debug_source_bind_get_value (stmt);
2629 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2630 }
2631 else if (gimple_debug_nonbind_marker_p (stmt))
2632 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2633 else
2634 gcc_unreachable ();
2635 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2636 id->debug_stmts.safe_push (new_stmt);
2637 gsi_prev (&ssi);
2638 }
2639 }
2640 }
2641
2642 /* Make a copy of the sub-loops of SRC_PARENT and place them
2643 as siblings of DEST_PARENT. */
2644
2645 static void
2646 copy_loops (copy_body_data *id,
2647 struct loop *dest_parent, struct loop *src_parent)
2648 {
2649 struct loop *src_loop = src_parent->inner;
2650 while (src_loop)
2651 {
2652 if (!id->blocks_to_copy
2653 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2654 {
2655 struct loop *dest_loop = alloc_loop ();
2656
2657 /* Assign the new loop its header and latch and associate
2658 those with the new loop. */
2659 dest_loop->header = (basic_block)src_loop->header->aux;
2660 dest_loop->header->loop_father = dest_loop;
2661 if (src_loop->latch != NULL)
2662 {
2663 dest_loop->latch = (basic_block)src_loop->latch->aux;
2664 dest_loop->latch->loop_father = dest_loop;
2665 }
2666
2667 /* Copy loop meta-data. */
2668 copy_loop_info (src_loop, dest_loop);
2669
2670 /* Finally place it into the loop array and the loop tree. */
2671 place_new_loop (cfun, dest_loop);
2672 flow_loop_tree_node_add (dest_parent, dest_loop);
2673
2674 dest_loop->safelen = src_loop->safelen;
2675 if (src_loop->unroll)
2676 {
2677 dest_loop->unroll = src_loop->unroll;
2678 cfun->has_unroll = true;
2679 }
2680 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2681 if (src_loop->force_vectorize)
2682 {
2683 dest_loop->force_vectorize = true;
2684 cfun->has_force_vectorize_loops = true;
2685 }
2686 if (src_loop->simduid)
2687 {
2688 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2689 cfun->has_simduid_loops = true;
2690 }
2691
2692 /* Recurse. */
2693 copy_loops (id, dest_loop, src_loop);
2694 }
2695 src_loop = src_loop->next;
2696 }
2697 }
2698
2699 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2700
2701 void
2702 redirect_all_calls (copy_body_data * id, basic_block bb)
2703 {
2704 gimple_stmt_iterator si;
2705 gimple *last = last_stmt (bb);
2706 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2707 {
2708 gimple *stmt = gsi_stmt (si);
2709 if (is_gimple_call (stmt))
2710 {
2711 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2712 if (edge)
2713 {
2714 edge->redirect_call_stmt_to_callee ();
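/* When inlining, if redirecting the block's last call removed its EH
   info because it can no longer throw, purge the now-dead EH edges.  */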
2715 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2716 gimple_purge_dead_eh_edges (bb);
2717 }
2718 }
2719 }
2720 }
2721
2722 /* Make a copy of the body of FN so that it can be inserted inline in
2723 another function. Walks FN via CFG, returns new fndecl. */
2724
2725 static tree
2726 copy_cfg_body (copy_body_data * id,
2727 basic_block entry_block_map, basic_block exit_block_map,
2728 basic_block new_entry)
2729 {
2730 tree callee_fndecl = id->src_fn;
2731 /* Original cfun for the callee, doesn't change. */
2732 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2733 struct function *cfun_to_copy;
2734 basic_block bb;
2735 tree new_fndecl = NULL;
2736 bool need_debug_cleanup = false;
2737 int last;
2738 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2739 profile_count num = entry_block_map->count;
2740
2741 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2742
2743 /* Register specific tree functions. */
2744 gimple_register_cfg_hooks ();
2745
2746 /* If we are inlining just a region of the function, make sure to connect
2747 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2748 part of a loop, we must compute the frequency and probability of
2749 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2750 probabilities of edges incoming from the nonduplicated region. */
2751 if (new_entry)
2752 {
2753 edge e;
2754 edge_iterator ei;
2755 den = profile_count::zero ();
2756
2757 FOR_EACH_EDGE (e, ei, new_entry->preds)
2758 if (!e->src->aux)
2759 den += e->count ();
2760 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2761 }
2762
2763 profile_count::adjust_for_ipa_scaling (&num, &den);
2764
2765 /* Must have a CFG here at this point. */
2766 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2767 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2768
2769
2770 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2771 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2772 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2773 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2774
2775 /* Duplicate any exception-handling regions. */
2776 if (cfun->eh)
2777 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2778 remap_decl_1, id);
2779
2780 /* Use aux pointers to map the original blocks to copy. */
2781 FOR_EACH_BB_FN (bb, cfun_to_copy)
2782 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2783 {
2784 basic_block new_bb = copy_bb (id, bb, num, den);
2785 bb->aux = new_bb;
2786 new_bb->aux = bb;
2787 new_bb->loop_father = entry_block_map->loop_father;
2788 }
2789
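/* Remember how many blocks exist now; blocks created past this index by
   edge insertion below are handled in a separate pass at the end.  */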
2790 last = last_basic_block_for_fn (cfun);
2791
2792 /* Now that we've duplicated the blocks, duplicate their edges. */
2793 basic_block abnormal_goto_dest = NULL;
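/* If the call being inlined can make an abnormal goto and is the last
   statement of its block, copied blocks may need abnormal edges to the
   caller's abnormal-goto dispatcher.  */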
2794 if (id->call_stmt
2795 && stmt_can_make_abnormal_goto (id->call_stmt))
2796 {
2797 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2798
2799 bb = gimple_bb (id->call_stmt);
2800 gsi_next (&gsi);
2801 if (gsi_end_p (gsi))
2802 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2803 }
2804 FOR_ALL_BB_FN (bb, cfun_to_copy)
2805 if (!id->blocks_to_copy
2806 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2807 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2808 abnormal_goto_dest, id);
2809
2810 if (new_entry)
2811 {
2812 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2813 EDGE_FALLTHRU);
2814 e->probability = profile_probability::always ();
2815 }
2816
2817 /* Duplicate the loop tree, if available and wanted. */
2818 if (loops_for_fn (src_cfun) != NULL
2819 && current_loops != NULL)
2820 {
2821 copy_loops (id, entry_block_map->loop_father,
2822 get_loop (src_cfun, 0));
2823 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2824 loops_state_set (LOOPS_NEED_FIXUP);
2825 }
2826
2827 /* If the loop tree in the source function needed fixup, mark the
2828 destination loop tree for fixup, too. */
2829 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2830 loops_state_set (LOOPS_NEED_FIXUP);
2831
2832 if (gimple_in_ssa_p (cfun))
2833 FOR_ALL_BB_FN (bb, cfun_to_copy)
2834 if (!id->blocks_to_copy
2835 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2836 copy_phis_for_bb (bb, id);
2837
2838 FOR_ALL_BB_FN (bb, cfun_to_copy)
2839 if (bb->aux)
2840 {
2841 if (need_debug_cleanup
2842 && bb->index != ENTRY_BLOCK
2843 && bb->index != EXIT_BLOCK)
2844 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2845 /* Update call edge destinations. This cannot be done before loop
2846 info is updated, because we may split basic blocks. */
2847 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2848 && bb->index != ENTRY_BLOCK
2849 && bb->index != EXIT_BLOCK)
2850 redirect_all_calls (id, (basic_block)bb->aux);
2851 ((basic_block)bb->aux)->aux = NULL;
2852 bb->aux = NULL;
2853 }
2854
2855 /* Zero out AUX fields of blocks newly created during EH edge
2856 insertion. */
2857 for (; last < last_basic_block_for_fn (cfun); last++)
2858 {
2859 if (need_debug_cleanup)
2860 maybe_move_debug_stmts_to_successors (id,
2861 BASIC_BLOCK_FOR_FN (cfun, last));
2862 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2863 /* Update call edge destinations. This cannot be done before loop
2864 info is updated, because we may split basic blocks. */
2865 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2866 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2867 }
2868 entry_block_map->aux = NULL;
2869 exit_block_map->aux = NULL;
2870
2871 if (id->eh_map)
2872 {
2873 delete id->eh_map;
2874 id->eh_map = NULL;
2875 }
2876 if (id->dependence_map)
2877 {
2878 delete id->dependence_map;
2879 id->dependence_map = NULL;
2880 }
2881
2882 return new_fndecl;
2883 }
2884
2885 /* Copy the debug STMT using ID. We deal with these statements in a
2886 special way: if any variable in their VALUE expression wasn't
2887 remapped yet, we won't remap it, because that would get decl uids
2888 out of sync, causing codegen differences between -g and -g0. If
2889 this arises, we drop the VALUE expression altogether. */
2890
2891 static void
2892 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2893 {
2894 tree t, *n;
2895 struct walk_stmt_info wi;
2896
2897 if (gimple_block (stmt))
2898 {
2899 n = id->decl_map->get (gimple_block (stmt));
2900 gimple_set_block (stmt, n ? *n : id->block);
2901 }
2902
2903 if (gimple_debug_nonbind_marker_p (stmt))
2904 return;
2905
2906 /* Remap all the operands in COPY. */
2907 memset (&wi, 0, sizeof (wi));
2908 wi.info = id;
2909
2910 processing_debug_stmt = 1;
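/* The remapping callbacks flag any decl or SSA name they cannot remap by
   setting PROCESSING_DEBUG_STMT to -1; that is checked below to drop the
   bound value.  */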
2911
2912 if (gimple_debug_source_bind_p (stmt))
2913 t = gimple_debug_source_bind_get_var (stmt);
2914 else if (gimple_debug_bind_p (stmt))
2915 t = gimple_debug_bind_get_var (stmt);
2916 else
2917 gcc_unreachable ();
2918
2919 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2920 && (n = id->debug_map->get (t)))
2921 {
2922 gcc_assert (VAR_P (*n));
2923 t = *n;
2924 }
2925 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2926 /* T is a non-localized variable. */;
2927 else
2928 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2929
2930 if (gimple_debug_bind_p (stmt))
2931 {
2932 gimple_debug_bind_set_var (stmt, t);
2933
2934 if (gimple_debug_bind_has_value_p (stmt))
2935 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2936 remap_gimple_op_r, &wi, NULL);
2937
2938 /* Punt if any decl couldn't be remapped. */
2939 if (processing_debug_stmt < 0)
2940 gimple_debug_bind_reset_value (stmt);
2941 }
2942 else if (gimple_debug_source_bind_p (stmt))
2943 {
2944 gimple_debug_source_bind_set_var (stmt, t);
2945 /* When inlining, if the source bind refers to one of the optimized
2946 away parameters, change the source bind into a normal debug bind
2947 referring to the corresponding DEBUG_EXPR_DECL that should have
2948 been bound before the call stmt. */
2949 t = gimple_debug_source_bind_get_value (stmt);
2950 if (t != NULL_TREE
2951 && TREE_CODE (t) == PARM_DECL
2952 && id->call_stmt)
2953 {
2954 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2955 unsigned int i;
2956 if (debug_args != NULL)
2957 {
2958 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2959 if ((**debug_args)[i] == DECL_ORIGIN (t)
2960 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2961 {
2962 t = (**debug_args)[i + 1];
2963 stmt->subcode = GIMPLE_DEBUG_BIND;
2964 gimple_debug_bind_set_value (stmt, t);
2965 break;
2966 }
2967 }
2968 }
2969 if (gimple_debug_source_bind_p (stmt))
2970 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2971 remap_gimple_op_r, &wi, NULL);
2972 }
2973
2974 processing_debug_stmt = 0;
2975
2976 update_stmt (stmt);
2977 }
2978
2979 /* Process deferred debug stmts. In order to give values better odds
2980 of being successfully remapped, we delay the processing of debug
2981 stmts until all other stmts that might require remapping are
2982 processed. */
2983
2984 static void
2985 copy_debug_stmts (copy_body_data *id)
2986 {
2987 size_t i;
2988 gdebug *stmt;
2989
2990 if (!id->debug_stmts.exists ())
2991 return;
2992
2993 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2994 copy_debug_stmt (stmt, id);
2995
2996 id->debug_stmts.release ();
2997 }
2998
2999 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3000 another function. */
3001
3002 static tree
3003 copy_tree_body (copy_body_data *id)
3004 {
3005 tree fndecl = id->src_fn;
3006 tree body = DECL_SAVED_TREE (fndecl);
3007
3008 walk_tree (&body, copy_tree_body_r, id, NULL);
3009
3010 return body;
3011 }
3012
3013 /* Make a copy of the body of FN so that it can be inserted inline in
3014 another function. */
3015
3016 static tree
3017 copy_body (copy_body_data *id,
3018 basic_block entry_block_map, basic_block exit_block_map,
3019 basic_block new_entry)
3020 {
3021 tree fndecl = id->src_fn;
3022 tree body;
3023
3024 /* If this body has a CFG, walk CFG and copy. */
3025 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3026 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3027 new_entry);
3028 copy_debug_stmts (id);
3029
3030 return body;
3031 }
3032
3033 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3034 defined in function FN, or of a data member thereof. */
3035
3036 static bool
3037 self_inlining_addr_expr (tree value, tree fn)
3038 {
3039 tree var;
3040
3041 if (TREE_CODE (value) != ADDR_EXPR)
3042 return false;
3043
3044 var = get_base_address (TREE_OPERAND (value, 0));
3045
3046 return var && auto_var_in_fn_p (var, fn);
3047 }
3048
3049 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3050 lexical block and line number information from base_stmt, if given,
3051 or from the last stmt of the block otherwise. */
3052
3053 static gimple *
3054 insert_init_debug_bind (copy_body_data *id,
3055 basic_block bb, tree var, tree value,
3056 gimple *base_stmt)
3057 {
3058 gimple *note;
3059 gimple_stmt_iterator gsi;
3060 tree tracked_var;
3061
3062 if (!gimple_in_ssa_p (id->src_cfun))
3063 return NULL;
3064
3065 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3066 return NULL;
3067
3068 tracked_var = target_for_debug_bind (var);
3069 if (!tracked_var)
3070 return NULL;
3071
3072 if (bb)
3073 {
3074 gsi = gsi_last_bb (bb);
3075 if (!base_stmt && !gsi_end_p (gsi))
3076 base_stmt = gsi_stmt (gsi);
3077 }
3078
3079 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3080
3081 if (bb)
3082 {
3083 if (!gsi_end_p (gsi))
3084 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3085 else
3086 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3087 }
3088
3089 return note;
3090 }
3091
3092 static void
3093 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3094 {
3095 /* If VAR represents a zero-sized variable, it's possible that the
3096 assignment statement may result in no gimple statements. */
3097 if (init_stmt)
3098 {
3099 gimple_stmt_iterator si = gsi_last_bb (bb);
3100
3101 /* We can end up with init statements that store to a non-register
3102 from a rhs with a conversion. Handle that here by forcing the
3103 rhs into a temporary. gimple_regimplify_operands is not
3104 prepared to do this for us. */
3105 if (!is_gimple_debug (init_stmt)
3106 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3107 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3108 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3109 {
3110 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3111 gimple_expr_type (init_stmt),
3112 gimple_assign_rhs1 (init_stmt));
3113 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3114 GSI_NEW_STMT);
3115 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3116 gimple_assign_set_rhs1 (init_stmt, rhs);
3117 }
3118 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3119 gimple_regimplify_operands (init_stmt, &si);
3120
3121 if (!is_gimple_debug (init_stmt))
3122 {
3123 tree def = gimple_assign_lhs (init_stmt);
3124 insert_init_debug_bind (id, bb, def, def, init_stmt);
3125 }
3126 }
3127 }
3128
3129 /* Initialize parameter P with VALUE. If needed, produce an init statement
3130 at the end of BB. When BB is NULL, we return the init statement to be
3131 output later. */
3132 static gimple *
3133 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3134 basic_block bb, tree *vars)
3135 {
3136 gimple *init_stmt = NULL;
3137 tree var;
3138 tree rhs = value;
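/* DEF is the default definition SSA_NAME of the parameter in the inlined
   (source) function, if there is one.  */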
3139 tree def = (gimple_in_ssa_p (cfun)
3140 ? ssa_default_def (id->src_cfun, p) : NULL);
3141
3142 if (value
3143 && value != error_mark_node
3144 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3145 {
3146 /* If we can match up types by promotion/demotion do so. */
3147 if (fold_convertible_p (TREE_TYPE (p), value))
3148 rhs = fold_convert (TREE_TYPE (p), value);
3149 else
3150 {
3151 /* ??? For valid programs we should not end up here.
3152 Still if we end up with truly mismatched types here, fall back
3153 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3154 GIMPLE to the following passes. */
3155 if (!is_gimple_reg_type (TREE_TYPE (value))
3156 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3157 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3158 else
3159 rhs = build_zero_cst (TREE_TYPE (p));
3160 }
3161 }
3162
3163 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3164 here since the type of this decl must be visible to the calling
3165 function. */
3166 var = copy_decl_to_var (p, id);
3167
3168 /* Declare this new variable. */
3169 DECL_CHAIN (var) = *vars;
3170 *vars = var;
3171
3172 /* Make gimplifier happy about this variable. */
3173 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3174
3175 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3176 we would not need to create a new variable here at all, if it
3177 weren't for debug info. Still, we can just use the argument
3178 value. */
3179 if (TREE_READONLY (p)
3180 && !TREE_ADDRESSABLE (p)
3181 && value && !TREE_SIDE_EFFECTS (value)
3182 && !def)
3183 {
3184 /* We may produce non-gimple trees by adding NOPs or introduce
3185 invalid sharing when the operand is not really constant.
3186 It is not a big deal to prohibit constant propagation here as
3187 we will constant propagate in the DOM1 pass anyway. */
3188 if (is_gimple_min_invariant (value)
3189 && useless_type_conversion_p (TREE_TYPE (p),
3190 TREE_TYPE (value))
3191 /* We have to be very careful about ADDR_EXPR. Make sure
3192 the base variable isn't a local variable of the inlined
3193 function, e.g., when doing recursive inlining, direct or
3194 mutually-recursive or whatever, which is why we don't
3195 just test whether fn == current_function_decl. */
3196 && ! self_inlining_addr_expr (value, fn))
3197 {
3198 insert_decl_map (id, p, value);
3199 insert_debug_decl_map (id, p, var);
3200 return insert_init_debug_bind (id, bb, var, value, NULL);
3201 }
3202 }
3203
3204 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3205 that way, when the PARM_DECL is encountered, it will be
3206 automatically replaced by the VAR_DECL. */
3207 insert_decl_map (id, p, var);
3208
3209 /* Even if P was TREE_READONLY, the new VAR should not be.
3210 In the original code, we would have constructed a
3211 temporary, and then the function body would have never
3212 changed the value of P. However, now, we will be
3213 constructing VAR directly. The constructor body may
3214 change its value multiple times as it is being
3215 constructed. Therefore, it must not be TREE_READONLY;
3216 the back-end assumes that a TREE_READONLY variable is
3217 assigned to only once. */
3218 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3219 TREE_READONLY (var) = 0;
3220
3221 /* If there is no setup required and we are in SSA, take the easy route
3222 replacing all SSA names representing the function parameter by the
3223 SSA name passed to the function.
3224
3225 We need to construct a map for the variable anyway as it might be used
3226 in different SSA names when the parameter is set in the function.
3227
3228 Do the replacement at -O0 for const arguments replaced by a constant.
3229 This is important for builtin_constant_p and other constructs requiring
3230 a constant argument to be visible in the inlined function body. */
3231 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3232 && (optimize
3233 || (TREE_READONLY (p)
3234 && is_gimple_min_invariant (rhs)))
3235 && (TREE_CODE (rhs) == SSA_NAME
3236 || is_gimple_min_invariant (rhs))
3237 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3238 {
3239 insert_decl_map (id, def, rhs);
3240 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3241 }
3242
3243 /* If the value of the argument is never used, don't bother initializing
3244 it. */
3245 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3246 {
3247 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3248 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3249 }
3250
3251 /* Initialize this VAR_DECL from the equivalent argument. Convert
3252 the argument to the proper type in case it was promoted. */
3253 if (value)
3254 {
3255 if (rhs == error_mark_node)
3256 {
3257 insert_decl_map (id, p, var);
3258 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3259 }
3260
3261 STRIP_USELESS_TYPE_CONVERSION (rhs);
3262
3263 /* If we are in SSA form, properly remap the default definition
3264 or assign to a dummy SSA name if the parameter is unused and
3265 we are not optimizing. */
3266 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3267 {
3268 if (def)
3269 {
3270 def = remap_ssa_name (def, id);
3271 init_stmt = gimple_build_assign (def, rhs);
3272 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3273 set_ssa_default_def (cfun, var, NULL);
3274 }
3275 else if (!optimize)
3276 {
3277 def = make_ssa_name (var);
3278 init_stmt = gimple_build_assign (def, rhs);
3279 }
3280 }
3281 else
3282 init_stmt = gimple_build_assign (var, rhs);
3283
3284 if (bb && init_stmt)
3285 insert_init_stmt (id, bb, init_stmt);
3286 }
3287 return init_stmt;
3288 }
3289
3290 /* Generate code to initialize the parameters of the function at the
3291 top of the stack in ID from the GIMPLE_CALL STMT. */
3292
3293 static void
3294 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3295 tree fn, basic_block bb)
3296 {
3297 tree parms;
3298 size_t i;
3299 tree p;
3300 tree vars = NULL_TREE;
3301 tree static_chain = gimple_call_chain (stmt);
3302
3303 /* Figure out what the parameters are. */
3304 parms = DECL_ARGUMENTS (fn);
3305
3306 /* Loop through the parameter declarations, replacing each with an
3307 equivalent VAR_DECL, appropriately initialized. */
3308 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3309 {
3310 tree val;
3311 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3312 setup_one_parameter (id, p, val, fn, bb, &vars);
3313 }
3314 /* After remapping parameters remap their types. This has to be done
3315 in a second loop over all parameters to appropriately remap
3316 variable sized arrays when the size is specified in a
3317 parameter following the array. */
3318 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3319 {
3320 tree *varp = id->decl_map->get (p);
3321 if (varp && VAR_P (*varp))
3322 {
3323 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3324 ? ssa_default_def (id->src_cfun, p) : NULL);
3325 tree var = *varp;
3326 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3327 /* Also remap the default definition if it was remapped
3328 to the default definition of the parameter replacement
3329 by the parameter setup. */
3330 if (def)
3331 {
3332 tree *defp = id->decl_map->get (def);
3333 if (defp
3334 && TREE_CODE (*defp) == SSA_NAME
3335 && SSA_NAME_VAR (*defp) == var)
3336 TREE_TYPE (*defp) = TREE_TYPE (var);
3337 }
3338 }
3339 }
3340
3341 /* Initialize the static chain. */
3342 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3343 gcc_assert (fn != current_function_decl);
3344 if (p)
3345 {
3346 /* No static chain? Seems like a bug in tree-nested.c. */
3347 gcc_assert (static_chain);
3348
3349 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3350 }
3351
3352 declare_inline_vars (id->block, vars);
3353 }
3354
3355
3356 /* Declare a return variable to replace the RESULT_DECL for the
3357 function we are calling. An appropriate DECL_STMT is returned.
3358 The USE_STMT is filled to contain a use of the declaration to
3359 indicate the return value of the function.
3360
3361 RETURN_SLOT, if non-null, is the place where to store the result. It
3362 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3363 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3364
3365 The return value is a (possibly null) value that holds the result
3366 as seen by the caller. */
3367
3368 static tree
3369 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3370 basic_block entry_bb)
3371 {
3372 tree callee = id->src_fn;
3373 tree result = DECL_RESULT (callee);
3374 tree callee_type = TREE_TYPE (result);
3375 tree caller_type;
3376 tree var, use;
3377
3378 /* Handle type-mismatches in the function declaration return type
3379 vs. the call expression. */
3380 if (modify_dest)
3381 caller_type = TREE_TYPE (modify_dest);
3382 else
3383 caller_type = TREE_TYPE (TREE_TYPE (callee));
3384
3385 /* We don't need to do anything for functions that don't return anything. */
3386 if (VOID_TYPE_P (callee_type))
3387 return NULL_TREE;
3388
3389 /* If there was a return slot, then the return value is the
3390 dereferenced address of that object. */
3391 if (return_slot)
3392 {
3393 /* The front end shouldn't have used both return_slot and
3394 a modify expression. */
3395 gcc_assert (!modify_dest);
3396 if (DECL_BY_REFERENCE (result))
3397 {
3398 tree return_slot_addr = build_fold_addr_expr (return_slot);
3399 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3400
3401 /* We are going to construct *&return_slot and we can't do that
3402 for variables believed to be not addressable.
3403
3404 FIXME: This check possibly can match, because values returned
3405 via return slot optimization are not believed to have their address
3406 taken by alias analysis. */
3407 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3408 var = return_slot_addr;
3409 }
3410 else
3411 {
3412 var = return_slot;
3413 gcc_assert (TREE_CODE (var) != SSA_NAME);
3414 if (TREE_ADDRESSABLE (result))
3415 mark_addressable (var);
3416 }
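/* If the callee does not treat its complex or vector RESULT_DECL as a
   gimple register, do not treat the return slot as one either.  */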
3417 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3418 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3419 && !DECL_GIMPLE_REG_P (result)
3420 && DECL_P (var))
3421 DECL_GIMPLE_REG_P (var) = 0;
3422 use = NULL;
3423 goto done;
3424 }
3425
3426 /* All types requiring non-trivial constructors should have been handled. */
3427 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3428
3429 /* Attempt to avoid creating a new temporary variable. */
3430 if (modify_dest
3431 && TREE_CODE (modify_dest) != SSA_NAME)
3432 {
3433 bool use_it = false;
3434
3435 /* We can't use MODIFY_DEST if there's type promotion involved. */
3436 if (!useless_type_conversion_p (callee_type, caller_type))
3437 use_it = false;
3438
3439 /* ??? If we're assigning to a variable sized type, then we must
3440 reuse the destination variable, because we've no good way to
3441 create variable sized temporaries at this point. */
3442 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3443 use_it = true;
3444
3445 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3446 reuse it as the result of the call directly. Don't do this if
3447 it would promote MODIFY_DEST to addressable. */
3448 else if (TREE_ADDRESSABLE (result))
3449 use_it = false;
3450 else
3451 {
3452 tree base_m = get_base_address (modify_dest);
3453
3454 /* If the base isn't a decl, then it's a pointer, and we don't
3455 know where that's going to go. */
3456 if (!DECL_P (base_m))
3457 use_it = false;
3458 else if (is_global_var (base_m))
3459 use_it = false;
3460 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3461 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3462 && !DECL_GIMPLE_REG_P (result)
3463 && DECL_GIMPLE_REG_P (base_m))
3464 use_it = false;
3465 else if (!TREE_ADDRESSABLE (base_m))
3466 use_it = true;
3467 }
3468
3469 if (use_it)
3470 {
3471 var = modify_dest;
3472 use = NULL;
3473 goto done;
3474 }
3475 }
3476
3477 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3478
3479 var = copy_result_decl_to_var (result, id);
3480 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3481
3482 /* Do not have the rest of GCC warn about this variable as it should
3483 not be visible to the user. */
3484 TREE_NO_WARNING (var) = 1;
3485
3486 declare_inline_vars (id->block, var);
3487
3488 /* Build the use expr. If the return type of the function was
3489 promoted, convert it back to the expected type. */
3490 use = var;
3491 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3492 {
3493 /* If we can match up types by promotion/demotion do so. */
3494 if (fold_convertible_p (caller_type, var))
3495 use = fold_convert (caller_type, var);
3496 else
3497 {
3498 /* ??? For valid programs we should not end up here.
3499 	     Still, if we end up with truly mismatched types here, fall back
3500 to using a MEM_REF to not leak invalid GIMPLE to the following
3501 passes. */
3502 /* Prevent var from being written into SSA form. */
3503 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3504 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3505 DECL_GIMPLE_REG_P (var) = false;
3506 else if (is_gimple_reg_type (TREE_TYPE (var)))
3507 TREE_ADDRESSABLE (var) = true;
3508 use = fold_build2 (MEM_REF, caller_type,
3509 build_fold_addr_expr (var),
3510 build_int_cst (ptr_type_node, 0));
3511 }
3512 }
3513
3514 STRIP_USELESS_TYPE_CONVERSION (use);
3515
3516 if (DECL_BY_REFERENCE (result))
3517 {
3518 TREE_ADDRESSABLE (var) = 1;
3519 var = build_fold_addr_expr (var);
3520 }
3521
3522 done:
3523 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3524 way, when the RESULT_DECL is encountered, it will be
3525 automatically replaced by the VAR_DECL.
3526
3527 When returning by reference, ensure that RESULT_DECL remaps to
3528 gimple_val. */
3529 if (DECL_BY_REFERENCE (result)
3530 && !is_gimple_val (var))
3531 {
3532 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3533 insert_decl_map (id, result, temp);
3534 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3535 	 its default_def SSA_NAME.  */
3536 if (gimple_in_ssa_p (id->src_cfun)
3537 && is_gimple_reg (result))
3538 {
3539 temp = make_ssa_name (temp);
3540 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3541 }
3542 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3543 }
3544 else
3545 insert_decl_map (id, result, var);
3546
3547 /* Remember this so we can ignore it in remap_decls. */
3548 id->retvar = var;
3549 return use;
3550 }
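/* An illustrative sketch (not from the GCC sources; the callee names are
   hypothetical) of what the logic above selects for the caller-side
   statement being inlined:

     struct big s = make_big ();  // return-slot case: "s" itself stands in
                                  // for the callee's RESULT_DECL
     int i = get_int ();          // "i" can be reused as MODIFY_DEST when no
                                  // promotion or addressability issue exists
     use_int (get_int ());        // otherwise a fresh temporary copied from
                                  // the RESULT_DECL carries the return value

   Whatever is chosen is recorded in id->retvar and mapped to the callee's
   RESULT_DECL, so the copied body stores its return value into it.  */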
3551
3552 /* Determine if the function can be copied. If so return NULL. If
3553    not, return a string describing the reason for failure.  */
3554
3555 const char *
3556 copy_forbidden (struct function *fun)
3557 {
3558 const char *reason = fun->cannot_be_copied_reason;
3559
3560 /* Only examine the function once. */
3561 if (fun->cannot_be_copied_set)
3562 return reason;
3563
3564 /* We cannot copy a function that receives a non-local goto
3565 because we cannot remap the destination label used in the
3566 function that is performing the non-local goto. */
3567 /* ??? Actually, this should be possible, if we work at it.
3568 No doubt there's just a handful of places that simply
3569 assume it doesn't happen and don't substitute properly. */
3570 if (fun->has_nonlocal_label)
3571 {
3572 reason = G_("function %q+F can never be copied "
3573 "because it receives a non-local goto");
3574 goto fail;
3575 }
3576
3577 if (fun->has_forced_label_in_static)
3578 {
3579 reason = G_("function %q+F can never be copied because it saves "
3580 "address of local label in a static variable");
3581 goto fail;
3582 }
3583
3584 fail:
3585 fun->cannot_be_copied_reason = reason;
3586 fun->cannot_be_copied_set = true;
3587 return reason;
3588 }
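/* A minimal illustrative example (GNU C nested functions, not part of this
   file) of a function copy_forbidden rejects because it receives a
   non-local goto, i.e. fun->has_nonlocal_label is set:

     int f (int x)
     {
       void bail (void) { goto out; }   // non-local goto back into f
       if (x < 0)
         bail ();
       return x;
      out:
       return -1;
     }
*/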
3589
3590
3591 static const char *inline_forbidden_reason;
3592
3593 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3594 iff a function cannot be inlined. Also sets the reason why. */
3595
3596 static tree
3597 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3598 struct walk_stmt_info *wip)
3599 {
3600 tree fn = (tree) wip->info;
3601 tree t;
3602 gimple *stmt = gsi_stmt (*gsi);
3603
3604 switch (gimple_code (stmt))
3605 {
3606 case GIMPLE_CALL:
3607       /* Refuse to inline an alloca call unless the user explicitly forced it,
3608 	 as this may change the program's memory overhead drastically when the
3609 	 function using alloca is called in a loop.  In the GCC included in
3610 	 SPEC2000, inlining into schedule_block caused it to require 2GB of
3611 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3612 	 VLA objects, as those can't cause unbounded growth (they're always
3613 	 wrapped inside stack_save/stack_restore regions).  */
3614 if (gimple_maybe_alloca_call_p (stmt)
3615 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3616 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3617 {
3618 inline_forbidden_reason
3619 = G_("function %q+F can never be inlined because it uses "
3620 "alloca (override using the always_inline attribute)");
3621 *handled_ops_p = true;
3622 return fn;
3623 }
3624
3625 t = gimple_call_fndecl (stmt);
3626 if (t == NULL_TREE)
3627 break;
3628
3629 /* We cannot inline functions that call setjmp. */
3630 if (setjmp_call_p (t))
3631 {
3632 inline_forbidden_reason
3633 = G_("function %q+F can never be inlined because it uses setjmp");
3634 *handled_ops_p = true;
3635 return t;
3636 }
3637
3638 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3639 switch (DECL_FUNCTION_CODE (t))
3640 {
3641 /* We cannot inline functions that take a variable number of
3642 arguments. */
3643 case BUILT_IN_VA_START:
3644 case BUILT_IN_NEXT_ARG:
3645 case BUILT_IN_VA_END:
3646 inline_forbidden_reason
3647 = G_("function %q+F can never be inlined because it "
3648 "uses variable argument lists");
3649 *handled_ops_p = true;
3650 return t;
3651
3652 case BUILT_IN_LONGJMP:
3653 /* We can't inline functions that call __builtin_longjmp at
3654 all. The non-local goto machinery really requires the
3655 destination be in a different function. If we allow the
3656 function calling __builtin_longjmp to be inlined into the
3657 function calling __builtin_setjmp, Things will Go Awry. */
3658 inline_forbidden_reason
3659 = G_("function %q+F can never be inlined because "
3660 "it uses setjmp-longjmp exception handling");
3661 *handled_ops_p = true;
3662 return t;
3663
3664 case BUILT_IN_NONLOCAL_GOTO:
3665 /* Similarly. */
3666 inline_forbidden_reason
3667 = G_("function %q+F can never be inlined because "
3668 "it uses non-local goto");
3669 *handled_ops_p = true;
3670 return t;
3671
3672 case BUILT_IN_RETURN:
3673 case BUILT_IN_APPLY_ARGS:
3674 /* If a __builtin_apply_args caller would be inlined,
3675 it would be saving arguments of the function it has
3676 been inlined into. Similarly __builtin_return would
3677 	     return from the function the call has been inlined into.  */
3678 inline_forbidden_reason
3679 = G_("function %q+F can never be inlined because "
3680 "it uses __builtin_return or __builtin_apply_args");
3681 *handled_ops_p = true;
3682 return t;
3683
3684 default:
3685 break;
3686 }
3687 break;
3688
3689 case GIMPLE_GOTO:
3690 t = gimple_goto_dest (stmt);
3691
3692 /* We will not inline a function which uses computed goto. The
3693 addresses of its local labels, which may be tucked into
3694 global storage, are of course not constant across
3695 instantiations, which causes unexpected behavior. */
3696 if (TREE_CODE (t) != LABEL_DECL)
3697 {
3698 inline_forbidden_reason
3699 = G_("function %q+F can never be inlined "
3700 "because it contains a computed goto");
3701 *handled_ops_p = true;
3702 return t;
3703 }
3704 break;
3705
3706 default:
3707 break;
3708 }
3709
3710 *handled_ops_p = false;
3711 return NULL_TREE;
3712 }
3713
3714 /* Return true if FNDECL is a function that cannot be inlined into
3715 another one. */
3716
3717 static bool
3718 inline_forbidden_p (tree fndecl)
3719 {
3720 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3721 struct walk_stmt_info wi;
3722 basic_block bb;
3723 bool forbidden_p = false;
3724
3725 /* First check for shared reasons not to copy the code. */
3726 inline_forbidden_reason = copy_forbidden (fun);
3727 if (inline_forbidden_reason != NULL)
3728 return true;
3729
3730 /* Next, walk the statements of the function looking for
3731      constructs we can't handle, or that are non-optimal for inlining.  */
3732 hash_set<tree> visited_nodes;
3733 memset (&wi, 0, sizeof (wi));
3734 wi.info = (void *) fndecl;
3735 wi.pset = &visited_nodes;
3736
3737 FOR_EACH_BB_FN (bb, fun)
3738 {
3739 gimple *ret;
3740 gimple_seq seq = bb_seq (bb);
3741 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3742 forbidden_p = (ret != NULL);
3743 if (forbidden_p)
3744 break;
3745 }
3746
3747 return forbidden_p;
3748 }
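/* Illustrative user-level examples (hypothetical code) of constructs the
   statement walk above rejects:

     int f (unsigned long n)
     {
       char *buf = __builtin_alloca (n);   // alloca without always_inline
       buf[0] = 0;
       return buf[0];
     }

     int g (int i)
     {
       void *tab[] = { &&a, &&b };         // labels as values ...
       goto *tab[i];                       // ... lead to a computed goto
      a: return 0;
      b: return 1;
     }
*/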
3749 \f
3750 /* Return false if the function FNDECL cannot be inlined on account of its
3751 attributes, true otherwise. */
3752 static bool
3753 function_attribute_inlinable_p (const_tree fndecl)
3754 {
3755 if (targetm.attribute_table)
3756 {
3757 const_tree a;
3758
3759 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3760 {
3761 const_tree name = TREE_PURPOSE (a);
3762 int i;
3763
3764 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3765 if (is_attribute_p (targetm.attribute_table[i].name, name))
3766 return targetm.function_attribute_inlinable_p (fndecl);
3767 }
3768 }
3769
3770 return true;
3771 }
3772
3773 /* Returns nonzero if FN is a function that does not have any
3774 fundamental inline blocking properties. */
3775
3776 bool
3777 tree_inlinable_function_p (tree fn)
3778 {
3779 bool inlinable = true;
3780 bool do_warning;
3781 tree always_inline;
3782
3783 /* If we've already decided this function shouldn't be inlined,
3784 there's no need to check again. */
3785 if (DECL_UNINLINABLE (fn))
3786 return false;
3787
3788 /* We only warn for functions declared `inline' by the user. */
3789 do_warning = (warn_inline
3790 && DECL_DECLARED_INLINE_P (fn)
3791 && !DECL_NO_INLINE_WARNING_P (fn)
3792 && !DECL_IN_SYSTEM_HEADER (fn));
3793
3794 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3795
3796 if (flag_no_inline
3797 && always_inline == NULL)
3798 {
3799 if (do_warning)
3800 warning (OPT_Winline, "function %q+F can never be inlined because it "
3801 "is suppressed using -fno-inline", fn);
3802 inlinable = false;
3803 }
3804
3805 else if (!function_attribute_inlinable_p (fn))
3806 {
3807 if (do_warning)
3808 warning (OPT_Winline, "function %q+F can never be inlined because it "
3809 "uses attributes conflicting with inlining", fn);
3810 inlinable = false;
3811 }
3812
3813 else if (inline_forbidden_p (fn))
3814 {
3815 /* See if we should warn about uninlinable functions. Previously,
3816 some of these warnings would be issued while trying to expand
3817 the function inline, but that would cause multiple warnings
3818 about functions that would for example call alloca. But since
3819 	 this is a property of the function, just one warning is enough.
3820 As a bonus we can now give more details about the reason why a
3821 function is not inlinable. */
3822 if (always_inline)
3823 error (inline_forbidden_reason, fn);
3824 else if (do_warning)
3825 warning (OPT_Winline, inline_forbidden_reason, fn);
3826
3827 inlinable = false;
3828 }
3829
3830 /* Squirrel away the result so that we don't have to check again. */
3831 DECL_UNINLINABLE (fn) = !inlinable;
3832
3833 return inlinable;
3834 }
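/* For example (hypothetical translation unit compiled with -Winline):

     inline int f (unsigned long n)
     {
       char *p = __builtin_alloca (n);
       p[0] = 0;
       return p[0];
     }

   f is declared inline but uses alloca, so it is diagnosed here at most
   once, with a warning along the lines of "function 'f' can never be
   inlined because it uses alloca", and DECL_UNINLINABLE is set so later
   queries return immediately.  */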
3835
3836 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3837    word size, take a possible memcpy call into account, and return the
3838    cost based on whether we are optimizing for size or speed, according to SPEED_P.  */
3839
3840 int
3841 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3842 {
3843 HOST_WIDE_INT size;
3844
3845 gcc_assert (!VOID_TYPE_P (type));
3846
3847 if (TREE_CODE (type) == VECTOR_TYPE)
3848 {
3849 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3850 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3851 int orig_mode_size
3852 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3853 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3854 return ((orig_mode_size + simd_mode_size - 1)
3855 / simd_mode_size);
3856 }
3857
3858 size = int_size_in_bytes (type);
3859
3860 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3861 /* Cost of a memcpy call, 3 arguments and the call. */
3862 return 4;
3863 else
3864 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3865 }
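/* A worked example under assumed target parameters (MOVE_MAX_PIECES == 8
   and MOVE_RATIO (speed_p) == 4, giving a 32-byte memcpy threshold):

     20-byte struct:   (20 + 8 - 1) / 8 = 3 piecewise moves
     64-byte struct:   above 32 bytes, so the flat memcpy cost of 4
     incomplete/variable size:  int_size_in_bytes returns -1, memcpy cost too

   Vector types are instead costed as the number of preferred SIMD-mode
   chunks needed to cover the vector's own mode.  */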
3866
3867 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3868
3869 static int
3870 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3871 tree op1 ATTRIBUTE_UNUSED, tree op2)
3872 {
3873 switch (code)
3874 {
3875 /* These are "free" conversions, or their presumed cost
3876 is folded into other operations. */
3877 case RANGE_EXPR:
3878 CASE_CONVERT:
3879 case COMPLEX_EXPR:
3880 case PAREN_EXPR:
3881 case VIEW_CONVERT_EXPR:
3882 return 0;
3883
3884 /* Assign cost of 1 to usual operations.
3885 ??? We may consider mapping RTL costs to this. */
3886 case COND_EXPR:
3887 case VEC_COND_EXPR:
3888 case VEC_PERM_EXPR:
3889
3890 case PLUS_EXPR:
3891 case POINTER_PLUS_EXPR:
3892 case POINTER_DIFF_EXPR:
3893 case MINUS_EXPR:
3894 case MULT_EXPR:
3895 case MULT_HIGHPART_EXPR:
3896
3897 case ADDR_SPACE_CONVERT_EXPR:
3898 case FIXED_CONVERT_EXPR:
3899 case FIX_TRUNC_EXPR:
3900
3901 case NEGATE_EXPR:
3902 case FLOAT_EXPR:
3903 case MIN_EXPR:
3904 case MAX_EXPR:
3905 case ABS_EXPR:
3906 case ABSU_EXPR:
3907
3908 case LSHIFT_EXPR:
3909 case RSHIFT_EXPR:
3910 case LROTATE_EXPR:
3911 case RROTATE_EXPR:
3912
3913 case BIT_IOR_EXPR:
3914 case BIT_XOR_EXPR:
3915 case BIT_AND_EXPR:
3916 case BIT_NOT_EXPR:
3917
3918 case TRUTH_ANDIF_EXPR:
3919 case TRUTH_ORIF_EXPR:
3920 case TRUTH_AND_EXPR:
3921 case TRUTH_OR_EXPR:
3922 case TRUTH_XOR_EXPR:
3923 case TRUTH_NOT_EXPR:
3924
3925 case LT_EXPR:
3926 case LE_EXPR:
3927 case GT_EXPR:
3928 case GE_EXPR:
3929 case EQ_EXPR:
3930 case NE_EXPR:
3931 case ORDERED_EXPR:
3932 case UNORDERED_EXPR:
3933
3934 case UNLT_EXPR:
3935 case UNLE_EXPR:
3936 case UNGT_EXPR:
3937 case UNGE_EXPR:
3938 case UNEQ_EXPR:
3939 case LTGT_EXPR:
3940
3941 case CONJ_EXPR:
3942
3943 case PREDECREMENT_EXPR:
3944 case PREINCREMENT_EXPR:
3945 case POSTDECREMENT_EXPR:
3946 case POSTINCREMENT_EXPR:
3947
3948 case REALIGN_LOAD_EXPR:
3949
3950 case WIDEN_SUM_EXPR:
3951 case WIDEN_MULT_EXPR:
3952 case DOT_PROD_EXPR:
3953 case SAD_EXPR:
3954 case WIDEN_MULT_PLUS_EXPR:
3955 case WIDEN_MULT_MINUS_EXPR:
3956 case WIDEN_LSHIFT_EXPR:
3957
3958 case VEC_WIDEN_MULT_HI_EXPR:
3959 case VEC_WIDEN_MULT_LO_EXPR:
3960 case VEC_WIDEN_MULT_EVEN_EXPR:
3961 case VEC_WIDEN_MULT_ODD_EXPR:
3962 case VEC_UNPACK_HI_EXPR:
3963 case VEC_UNPACK_LO_EXPR:
3964 case VEC_UNPACK_FLOAT_HI_EXPR:
3965 case VEC_UNPACK_FLOAT_LO_EXPR:
3966 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3967 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3968 case VEC_PACK_TRUNC_EXPR:
3969 case VEC_PACK_SAT_EXPR:
3970 case VEC_PACK_FIX_TRUNC_EXPR:
3971 case VEC_PACK_FLOAT_EXPR:
3972 case VEC_WIDEN_LSHIFT_HI_EXPR:
3973 case VEC_WIDEN_LSHIFT_LO_EXPR:
3974 case VEC_DUPLICATE_EXPR:
3975 case VEC_SERIES_EXPR:
3976
3977 return 1;
3978
3979     /* A few special cases of expensive operations.  This is useful
3980        to avoid inlining functions that contain too many of these.  */
3981 case TRUNC_DIV_EXPR:
3982 case CEIL_DIV_EXPR:
3983 case FLOOR_DIV_EXPR:
3984 case ROUND_DIV_EXPR:
3985 case EXACT_DIV_EXPR:
3986 case TRUNC_MOD_EXPR:
3987 case CEIL_MOD_EXPR:
3988 case FLOOR_MOD_EXPR:
3989 case ROUND_MOD_EXPR:
3990 case RDIV_EXPR:
3991 if (TREE_CODE (op2) != INTEGER_CST)
3992 return weights->div_mod_cost;
3993 return 1;
3994
3995 /* Bit-field insertion needs several shift and mask operations. */
3996 case BIT_INSERT_EXPR:
3997 return 3;
3998
3999 default:
4000 /* We expect a copy assignment with no operator. */
4001 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4002 return 0;
4003 }
4004 }
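/* For instance, with the weights used by the inliner (GIMPLE-level sketch,
   not literal source code):

     a = b + c;     // PLUS_EXPR: cost 1
     a = (int) b;   // CASE_CONVERT: cost 0, presumed folded into other ops
     a = b / c;     // TRUNC_DIV_EXPR, non-constant divisor: div_mod_cost
     a = b / 16;    // constant divisor: back to cost 1
     a = b;         // plain copy: cost 0 here; any memory-move cost is added
                    //   separately by estimate_num_insns  */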
4005
4006
4007 /* Estimate number of instructions that will be created by expanding
4008 the statements in the statement sequence STMTS.
4009 WEIGHTS contains weights attributed to various constructs. */
4010
4011 int
4012 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4013 {
4014 int cost;
4015 gimple_stmt_iterator gsi;
4016
4017 cost = 0;
4018 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4019 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4020
4021 return cost;
4022 }
4023
4024
4025 /* Estimate number of instructions that will be created by expanding STMT.
4026 WEIGHTS contains weights attributed to various constructs. */
4027
4028 int
4029 estimate_num_insns (gimple *stmt, eni_weights *weights)
4030 {
4031 unsigned cost, i;
4032 enum gimple_code code = gimple_code (stmt);
4033 tree lhs;
4034 tree rhs;
4035
4036 switch (code)
4037 {
4038 case GIMPLE_ASSIGN:
4039       /* Try to estimate the cost of assignments.  We have two cases to
4040 deal with:
4041 1) Simple assignments to registers;
4042 2) Stores to things that must live in memory. This includes
4043 "normal" stores to scalars, but also assignments of large
4044 structures, or constructors of big arrays;
4045
4046 	 Let us look at both cases, assuming we have "a = b + C":
4047 <GIMPLE_ASSIGN <var_decl "a">
4048 <plus_expr <var_decl "b"> <constant C>>
4049 If "a" is a GIMPLE register, the assignment to it is free on almost
4050 any target, because "a" usually ends up in a real register. Hence
4051 the only cost of this expression comes from the PLUS_EXPR, and we
4052 can ignore the GIMPLE_ASSIGN.
4053 If "a" is not a GIMPLE register, the assignment to "a" will most
4054 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4055 of moving something into "a", which we compute using the function
4056 estimate_move_cost. */
4057 if (gimple_clobber_p (stmt))
4058 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4059
4060 lhs = gimple_assign_lhs (stmt);
4061 rhs = gimple_assign_rhs1 (stmt);
4062
4063 cost = 0;
4064
4065 /* Account for the cost of moving to / from memory. */
4066 if (gimple_store_p (stmt))
4067 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4068 if (gimple_assign_load_p (stmt))
4069 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4070
4071 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4072 gimple_assign_rhs1 (stmt),
4073 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4074 == GIMPLE_BINARY_RHS
4075 ? gimple_assign_rhs2 (stmt) : NULL);
4076 break;
4077
4078 case GIMPLE_COND:
4079 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4080 gimple_op (stmt, 0),
4081 gimple_op (stmt, 1));
4082 break;
4083
4084 case GIMPLE_SWITCH:
4085 {
4086 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4087 	/* Take into account the cost of the switch + guess 2 conditional jumps for
4088 	   each case label.
4089 
4090 	   TODO: once the switch expansion logic is sufficiently separated, we can
4091 	   do a better job of estimating the cost of the switch.  */
4092 if (weights->time_based)
4093 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4094 else
4095 cost = gimple_switch_num_labels (switch_stmt) * 2;
4096 }
4097 break;
4098
4099 case GIMPLE_CALL:
4100 {
4101 tree decl;
4102
4103 if (gimple_call_internal_p (stmt))
4104 return 0;
4105 else if ((decl = gimple_call_fndecl (stmt))
4106 && fndecl_built_in_p (decl))
4107 {
4108 /* Do not special case builtins where we see the body.
4109 	       This just confuses the inliner.  */
4110 struct cgraph_node *node;
4111 if (!(node = cgraph_node::get (decl))
4112 || node->definition)
4113 ;
4114 	    /* For builtins that are likely expanded to nothing or
4115 	       inlined, do not account for operand costs.  */
4116 else if (is_simple_builtin (decl))
4117 return 0;
4118 else if (is_inexpensive_builtin (decl))
4119 return weights->target_builtin_call_cost;
4120 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4121 {
4122 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4123 specialize the cheap expansion we do here.
4124 ??? This asks for a more general solution. */
4125 switch (DECL_FUNCTION_CODE (decl))
4126 {
4127 case BUILT_IN_POW:
4128 case BUILT_IN_POWF:
4129 case BUILT_IN_POWL:
4130 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4131 && (real_equal
4132 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4133 &dconst2)))
4134 return estimate_operator_cost
4135 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4136 gimple_call_arg (stmt, 0));
4137 break;
4138
4139 default:
4140 break;
4141 }
4142 }
4143 }
4144
4145 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4146 if (gimple_call_lhs (stmt))
4147 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4148 weights->time_based);
4149 for (i = 0; i < gimple_call_num_args (stmt); i++)
4150 {
4151 tree arg = gimple_call_arg (stmt, i);
4152 cost += estimate_move_cost (TREE_TYPE (arg),
4153 weights->time_based);
4154 }
4155 break;
4156 }
4157
4158 case GIMPLE_RETURN:
4159 return weights->return_cost;
4160
4161 case GIMPLE_GOTO:
4162 case GIMPLE_LABEL:
4163 case GIMPLE_NOP:
4164 case GIMPLE_PHI:
4165 case GIMPLE_PREDICT:
4166 case GIMPLE_DEBUG:
4167 return 0;
4168
4169 case GIMPLE_ASM:
4170 {
4171 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4172 /* 1000 means infinity. This avoids overflows later
4173 with very long asm statements. */
4174 if (count > 1000)
4175 count = 1000;
4176 /* If this asm is asm inline, count anything as minimum size. */
4177 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4178 count = MIN (1, count);
4179 return MAX (1, count);
4180 }
4181
4182 case GIMPLE_RESX:
4183 /* This is either going to be an external function call with one
4184 argument, or two register copy statements plus a goto. */
4185 return 2;
4186
4187 case GIMPLE_EH_DISPATCH:
4188 /* ??? This is going to turn into a switch statement. Ideally
4189 we'd have a look at the eh region and estimate the number of
4190 edges involved. */
4191 return 10;
4192
4193 case GIMPLE_BIND:
4194 return estimate_num_insns_seq (
4195 gimple_bind_body (as_a <gbind *> (stmt)),
4196 weights);
4197
4198 case GIMPLE_EH_FILTER:
4199 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4200
4201 case GIMPLE_CATCH:
4202 return estimate_num_insns_seq (gimple_catch_handler (
4203 as_a <gcatch *> (stmt)),
4204 weights);
4205
4206 case GIMPLE_TRY:
4207 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4208 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4209
4210 /* OMP directives are generally very expensive. */
4211
4212 case GIMPLE_OMP_RETURN:
4213 case GIMPLE_OMP_SECTIONS_SWITCH:
4214 case GIMPLE_OMP_ATOMIC_STORE:
4215 case GIMPLE_OMP_CONTINUE:
4216 /* ...except these, which are cheap. */
4217 return 0;
4218
4219 case GIMPLE_OMP_ATOMIC_LOAD:
4220 return weights->omp_cost;
4221
4222 case GIMPLE_OMP_FOR:
4223 return (weights->omp_cost
4224 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4225 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4226
4227 case GIMPLE_OMP_PARALLEL:
4228 case GIMPLE_OMP_TASK:
4229 case GIMPLE_OMP_CRITICAL:
4230 case GIMPLE_OMP_MASTER:
4231 case GIMPLE_OMP_TASKGROUP:
4232 case GIMPLE_OMP_ORDERED:
4233 case GIMPLE_OMP_SECTION:
4234 case GIMPLE_OMP_SECTIONS:
4235 case GIMPLE_OMP_SINGLE:
4236 case GIMPLE_OMP_TARGET:
4237 case GIMPLE_OMP_TEAMS:
4238 return (weights->omp_cost
4239 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4240
4241 case GIMPLE_TRANSACTION:
4242 return (weights->tm_cost
4243 + estimate_num_insns_seq (gimple_transaction_body (
4244 as_a <gtransaction *> (stmt)),
4245 weights));
4246
4247 default:
4248 gcc_unreachable ();
4249 }
4250
4251 return cost;
4252 }
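/* A small worked example (GIMPLE sketch; a word-sized scalar type and the
   size weights initialized by init_inline_once below are assumed):

     _1 = x_2(D) * x_3(D);   // register destination: operator cost 1
     mem = _1;               // store: move cost 1, operator cost 0
     foo (x_2(D));           // known callee: call_cost 1 + move cost 1
                             //   for the argument

   for a running total of 1 + 1 + 2 = 4.  */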
4253
4254 /* Estimate number of instructions that will be created by expanding
4255 function FNDECL. WEIGHTS contains weights attributed to various
4256 constructs. */
4257
4258 int
4259 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4260 {
4261 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4262 gimple_stmt_iterator bsi;
4263 basic_block bb;
4264 int n = 0;
4265
4266 gcc_assert (my_function && my_function->cfg);
4267 FOR_EACH_BB_FN (bb, my_function)
4268 {
4269 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4270 n += estimate_num_insns (gsi_stmt (bsi), weights);
4271 }
4272
4273 return n;
4274 }
4275
4276
4277 /* Initializes weights used by estimate_num_insns. */
4278
4279 void
4280 init_inline_once (void)
4281 {
4282 eni_size_weights.call_cost = 1;
4283 eni_size_weights.indirect_call_cost = 3;
4284 eni_size_weights.target_builtin_call_cost = 1;
4285 eni_size_weights.div_mod_cost = 1;
4286 eni_size_weights.omp_cost = 40;
4287 eni_size_weights.tm_cost = 10;
4288 eni_size_weights.time_based = false;
4289 eni_size_weights.return_cost = 1;
4290
4291 /* Estimating time for call is difficult, since we have no idea what the
4292 called function does. In the current uses of eni_time_weights,
4293 underestimating the cost does less harm than overestimating it, so
4294 we choose a rather small value here. */
4295 eni_time_weights.call_cost = 10;
4296 eni_time_weights.indirect_call_cost = 15;
4297 eni_time_weights.target_builtin_call_cost = 1;
4298 eni_time_weights.div_mod_cost = 10;
4299 eni_time_weights.omp_cost = 40;
4300 eni_time_weights.tm_cost = 40;
4301 eni_time_weights.time_based = true;
4302 eni_time_weights.return_cost = 2;
4303 }
4304
4305
4306 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4307
4308 static void
4309 prepend_lexical_block (tree current_block, tree new_block)
4310 {
4311 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4312 BLOCK_SUBBLOCKS (current_block) = new_block;
4313 BLOCK_SUPERCONTEXT (new_block) = current_block;
4314 }
4315
4316 /* Add local variables from CALLEE to CALLER. */
4317
4318 static inline void
4319 add_local_variables (struct function *callee, struct function *caller,
4320 copy_body_data *id)
4321 {
4322 tree var;
4323 unsigned ix;
4324
4325 FOR_EACH_LOCAL_DECL (callee, ix, var)
4326 if (!can_be_nonlocal (var, id))
4327 {
4328 tree new_var = remap_decl (var, id);
4329
4330 /* Remap debug-expressions. */
4331 if (VAR_P (new_var)
4332 && DECL_HAS_DEBUG_EXPR_P (var)
4333 && new_var != var)
4334 {
4335 tree tem = DECL_DEBUG_EXPR (var);
4336 bool old_regimplify = id->regimplify;
4337 id->remapping_type_depth++;
4338 walk_tree (&tem, copy_tree_body_r, id, NULL);
4339 id->remapping_type_depth--;
4340 id->regimplify = old_regimplify;
4341 SET_DECL_DEBUG_EXPR (new_var, tem);
4342 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4343 }
4344 add_local_decl (caller, new_var);
4345 }
4346 }
4347
4348 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4349 have brought in or introduced any debug stmts for SRCVAR. */
4350
4351 static inline void
4352 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4353 {
4354 tree *remappedvarp = id->decl_map->get (srcvar);
4355
4356 if (!remappedvarp)
4357 return;
4358
4359 if (!VAR_P (*remappedvarp))
4360 return;
4361
4362 if (*remappedvarp == id->retvar)
4363 return;
4364
4365 tree tvar = target_for_debug_bind (*remappedvarp);
4366 if (!tvar)
4367 return;
4368
4369 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4370 id->call_stmt);
4371 gimple_seq_add_stmt (bindings, stmt);
4372 }
4373
4374 /* For each inlined variable for which we may have debug bind stmts,
4375 add before GSI a final debug stmt resetting it, marking the end of
4376 its life, so that var-tracking knows it doesn't have to compute
4377 further locations for it. */
4378
4379 static inline void
4380 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4381 {
4382 tree var;
4383 unsigned ix;
4384 gimple_seq bindings = NULL;
4385
4386 if (!gimple_in_ssa_p (id->src_cfun))
4387 return;
4388
4389 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4390 return;
4391
4392 for (var = DECL_ARGUMENTS (id->src_fn);
4393 var; var = DECL_CHAIN (var))
4394 reset_debug_binding (id, var, &bindings);
4395
4396 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4397 reset_debug_binding (id, var, &bindings);
4398
4399 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4400 }
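/* For example (illustrative dump fragments): if the inlined body brought in
   debug binds for a callee local "i", such as

     # DEBUG i => i_3

   this function appends, just before the statements that follow the inlined
   body,

     # DEBUG i => NULL

   so that var-tracking stops computing further locations for the now-dead
   inlined copy.  */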
4401
4402 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4403
4404 static bool
4405 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4406 {
4407 tree use_retvar;
4408 tree fn;
4409 hash_map<tree, tree> *dst;
4410 hash_map<tree, tree> *st = NULL;
4411 tree return_slot;
4412 tree modify_dest;
4413 struct cgraph_edge *cg_edge;
4414 cgraph_inline_failed_t reason;
4415 basic_block return_block;
4416 edge e;
4417 gimple_stmt_iterator gsi, stmt_gsi;
4418 bool successfully_inlined = false;
4419 bool purge_dead_abnormal_edges;
4420 gcall *call_stmt;
4421 unsigned int prop_mask, src_properties;
4422 struct function *dst_cfun;
4423 tree simduid;
4424 use_operand_p use;
4425 gimple *simtenter_stmt = NULL;
4426 vec<tree> *simtvars_save;
4427
4428 /* The gimplifier uses input_location in too many places, such as
4429 internal_get_tmp_var (). */
4430 location_t saved_location = input_location;
4431 input_location = gimple_location (stmt);
4432
4433 /* From here on, we're only interested in CALL_EXPRs. */
4434 call_stmt = dyn_cast <gcall *> (stmt);
4435 if (!call_stmt)
4436 goto egress;
4437
4438 cg_edge = id->dst_node->get_edge (stmt);
4439 gcc_checking_assert (cg_edge);
4440 /* First, see if we can figure out what function is being called.
4441 If we cannot, then there is no hope of inlining the function. */
4442 if (cg_edge->indirect_unknown_callee)
4443 goto egress;
4444 fn = cg_edge->callee->decl;
4445 gcc_checking_assert (fn);
4446
4447 /* If FN is a declaration of a function in a nested scope that was
4448 globally declared inline, we don't set its DECL_INITIAL.
4449 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4450 C++ front-end uses it for cdtors to refer to their internal
4451 declarations, that are not real functions. Fortunately those
4452 don't have trees to be saved, so we can tell by checking their
4453 gimple_body. */
4454 if (!DECL_INITIAL (fn)
4455 && DECL_ABSTRACT_ORIGIN (fn)
4456 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4457 fn = DECL_ABSTRACT_ORIGIN (fn);
4458
4459 /* Don't try to inline functions that are not well-suited to inlining. */
4460 if (cg_edge->inline_failed)
4461 {
4462 reason = cg_edge->inline_failed;
4463 /* If this call was originally indirect, we do not want to emit any
4464 inlining related warnings or sorry messages because there are no
4465 guarantees regarding those. */
4466 if (cg_edge->indirect_inlining_edge)
4467 goto egress;
4468
4469 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4470 	  /* For extern inline functions that get redefined we have always
4471 	     silently ignored the always_inline flag.  Better behavior would
4472 	     be to be able to keep both bodies and use the extern inline body
4473 	     for inlining, but we can't do that because frontends overwrite
4474 	     the body.  */
4475 && !cg_edge->callee->local.redefined_extern_inline
4476 /* During early inline pass, report only when optimization is
4477 not turned on. */
4478 && (symtab->global_info_ready
4479 || !optimize
4480 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4481 /* PR 20090218-1_0.c. Body can be provided by another module. */
4482 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4483 {
4484 error ("inlining failed in call to always_inline %q+F: %s", fn,
4485 cgraph_inline_failed_string (reason));
4486 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4487 inform (gimple_location (stmt), "called from here");
4488 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4489 inform (DECL_SOURCE_LOCATION (cfun->decl),
4490 "called from this function");
4491 }
4492 else if (warn_inline
4493 && DECL_DECLARED_INLINE_P (fn)
4494 && !DECL_NO_INLINE_WARNING_P (fn)
4495 && !DECL_IN_SYSTEM_HEADER (fn)
4496 && reason != CIF_UNSPECIFIED
4497 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4498 /* Do not warn about not inlined recursive calls. */
4499 && !cg_edge->recursive_p ()
4500 /* Avoid warnings during early inline pass. */
4501 && symtab->global_info_ready)
4502 {
4503 auto_diagnostic_group d;
4504 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4505 fn, _(cgraph_inline_failed_string (reason))))
4506 {
4507 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4508 inform (gimple_location (stmt), "called from here");
4509 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4510 inform (DECL_SOURCE_LOCATION (cfun->decl),
4511 "called from this function");
4512 }
4513 }
4514 goto egress;
4515 }
4516 id->src_node = cg_edge->callee;
4517
4518   /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4519      and redirect to the function being thunked.  */
4520 if (id->src_node->thunk.thunk_p)
4521 {
4522 cgraph_edge *edge;
4523 tree virtual_offset = NULL;
4524 profile_count count = cg_edge->count;
4525 tree op;
4526 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4527
4528 cg_edge->remove ();
4529 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4530 gimple_uid (stmt),
4531 profile_count::one (),
4532 profile_count::one (),
4533 true);
4534 edge->count = count;
4535 if (id->src_node->thunk.virtual_offset_p)
4536 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4537 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4538 NULL);
4539 gsi_insert_before (&iter, gimple_build_assign (op,
4540 gimple_call_arg (stmt, 0)),
4541 GSI_NEW_STMT);
4542 gcc_assert (id->src_node->thunk.this_adjusting);
4543 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4544 virtual_offset, id->src_node->thunk.indirect_offset);
4545
4546 gimple_call_set_arg (stmt, 0, op);
4547 gimple_call_set_fndecl (stmt, edge->callee->decl);
4548 update_stmt (stmt);
4549 id->src_node->remove ();
4550 expand_call_inline (bb, stmt, id);
4551 maybe_remove_unused_call_args (cfun, stmt);
4552 return true;
4553 }
4554 fn = cg_edge->callee->decl;
4555 cg_edge->callee->get_untransformed_body ();
4556
4557 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4558 cg_edge->callee->verify ();
4559
4560 /* We will be inlining this callee. */
4561 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4562
4563 /* Update the callers EH personality. */
4564 if (DECL_FUNCTION_PERSONALITY (fn))
4565 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4566 = DECL_FUNCTION_PERSONALITY (fn);
4567
4568 /* Split the block before the GIMPLE_CALL. */
4569 stmt_gsi = gsi_for_stmt (stmt);
4570 gsi_prev (&stmt_gsi);
4571 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4572 bb = e->src;
4573 return_block = e->dest;
4574 remove_edge (e);
4575
4576   /* If the GIMPLE_CALL was the last statement of BB, it may have
4577 been the source of abnormal edges. In this case, schedule
4578 the removal of dead abnormal edges. */
4579 gsi = gsi_start_bb (return_block);
4580 gsi_next (&gsi);
4581 purge_dead_abnormal_edges = gsi_end_p (gsi);
4582
4583 stmt_gsi = gsi_start_bb (return_block);
4584
4585 /* Build a block containing code to initialize the arguments, the
4586 actual inline expansion of the body, and a label for the return
4587 statements within the function to jump to. The type of the
4588 statement expression is the return type of the function call.
4589 ??? If the call does not have an associated block then we will
4590 remap all callee blocks to NULL, effectively dropping most of
4591 its debug information. This should only happen for calls to
4592 artificial decls inserted by the compiler itself. We need to
4593 either link the inlined blocks into the caller block tree or
4594 not refer to them in any way to not break GC for locations. */
4595 if (gimple_block (stmt))
4596 {
4597       /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4598 	 so that inlined_function_outer_scope_p returns true on this BLOCK.  */
4599 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4600 if (loc == UNKNOWN_LOCATION)
4601 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4602 if (loc == UNKNOWN_LOCATION)
4603 loc = BUILTINS_LOCATION;
4604 id->block = make_node (BLOCK);
4605 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4606 BLOCK_SOURCE_LOCATION (id->block) = loc;
4607 prepend_lexical_block (gimple_block (stmt), id->block);
4608 }
4609
4610 /* Local declarations will be replaced by their equivalents in this map. */
4611 st = id->decl_map;
4612 id->decl_map = new hash_map<tree, tree>;
4613 dst = id->debug_map;
4614 id->debug_map = NULL;
4615 if (flag_stack_reuse != SR_NONE)
4616 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4617
4618 /* Record the function we are about to inline. */
4619 id->src_fn = fn;
4620 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4621 id->reset_location = DECL_IGNORED_P (fn);
4622 id->call_stmt = call_stmt;
4623
4624 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4625 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4626 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4627 simtvars_save = id->dst_simt_vars;
4628 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4629 && (simduid = bb->loop_father->simduid) != NULL_TREE
4630 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4631 && single_imm_use (simduid, &use, &simtenter_stmt)
4632 && is_gimple_call (simtenter_stmt)
4633 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4634 vec_alloc (id->dst_simt_vars, 0);
4635 else
4636 id->dst_simt_vars = NULL;
4637
4638 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4639 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4640
4641 /* If the src function contains an IFN_VA_ARG, then so will the dst
4642 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4643 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4644 src_properties = id->src_cfun->curr_properties & prop_mask;
4645 if (src_properties != prop_mask)
4646 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4647
4648 gcc_assert (!id->src_cfun->after_inlining);
4649
4650 id->entry_bb = bb;
4651 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4652 {
4653 gimple_stmt_iterator si = gsi_last_bb (bb);
4654 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4655 NOT_TAKEN),
4656 GSI_NEW_STMT);
4657 }
4658 initialize_inlined_parameters (id, stmt, fn, bb);
4659 if (debug_nonbind_markers_p && debug_inline_points && id->block
4660 && inlined_function_outer_scope_p (id->block))
4661 {
4662 gimple_stmt_iterator si = gsi_last_bb (bb);
4663 gsi_insert_after (&si, gimple_build_debug_inline_entry
4664 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4665 GSI_NEW_STMT);
4666 }
4667
4668 if (DECL_INITIAL (fn))
4669 {
4670 if (gimple_block (stmt))
4671 {
4672 tree *var;
4673
4674 prepend_lexical_block (id->block,
4675 remap_blocks (DECL_INITIAL (fn), id));
4676 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4677 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4678 == NULL_TREE));
4679 	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4680 	     otherwise for DWARF the DW_TAG_formal_parameter dies will not be
4681 	     children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4682 	     under it.  The parameters can then be evaluated in the debugger,
4683 	     but they don't show up in backtraces.  */
4684 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4685 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4686 {
4687 tree v = *var;
4688 *var = TREE_CHAIN (v);
4689 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4690 BLOCK_VARS (id->block) = v;
4691 }
4692 else
4693 var = &TREE_CHAIN (*var);
4694 }
4695 else
4696 remap_blocks_to_null (DECL_INITIAL (fn), id);
4697 }
4698
4699 /* Return statements in the function body will be replaced by jumps
4700 to the RET_LABEL. */
4701 gcc_assert (DECL_INITIAL (fn));
4702 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4703
4704 /* Find the LHS to which the result of this call is assigned. */
4705 return_slot = NULL;
4706 if (gimple_call_lhs (stmt))
4707 {
4708 modify_dest = gimple_call_lhs (stmt);
4709
4710 /* The function which we are inlining might not return a value,
4711 in which case we should issue a warning that the function
4712 does not return a value. In that case the optimizers will
4713 see that the variable to which the value is assigned was not
4714 initialized. We do not want to issue a warning about that
4715 uninitialized variable. */
4716 if (DECL_P (modify_dest))
4717 TREE_NO_WARNING (modify_dest) = 1;
4718
4719 if (gimple_call_return_slot_opt_p (call_stmt))
4720 {
4721 return_slot = modify_dest;
4722 modify_dest = NULL;
4723 }
4724 }
4725 else
4726 modify_dest = NULL;
4727
4728 /* If we are inlining a call to the C++ operator new, we don't want
4729 to use type based alias analysis on the return value. Otherwise
4730 we may get confused if the compiler sees that the inlined new
4731 function returns a pointer which was just deleted. See bug
4732 33407. */
4733 if (DECL_IS_OPERATOR_NEW (fn))
4734 {
4735 return_slot = NULL;
4736 modify_dest = NULL;
4737 }
4738
4739 /* Declare the return variable for the function. */
4740 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4741
4742 /* Add local vars in this inlined callee to caller. */
4743 add_local_variables (id->src_cfun, cfun, id);
4744
4745 if (dump_enabled_p ())
4746 {
4747 char buf[128];
4748 snprintf (buf, sizeof(buf), "%4.2f",
4749 cg_edge->sreal_frequency ().to_double ());
4750 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4751 call_stmt,
4752 "Inlining %C to %C with frequency %s\n",
4753 id->src_node, id->dst_node, buf);
4754 if (dump_file && (dump_flags & TDF_DETAILS))
4755 {
4756 id->src_node->dump (dump_file);
4757 id->dst_node->dump (dump_file);
4758 }
4759 }
4760
4761 /* This is it. Duplicate the callee body. Assume callee is
4762 pre-gimplified. Note that we must not alter the caller
4763 function in any way before this point, as this CALL_EXPR may be
4764 a self-referential call; if we're calling ourselves, we need to
4765 duplicate our body before altering anything. */
4766 copy_body (id, bb, return_block, NULL);
4767
4768 reset_debug_bindings (id, stmt_gsi);
4769
4770 if (flag_stack_reuse != SR_NONE)
4771 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4772 if (!TREE_THIS_VOLATILE (p))
4773 {
4774 tree *varp = id->decl_map->get (p);
4775 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4776 {
4777 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4778 gimple *clobber_stmt;
4779 TREE_THIS_VOLATILE (clobber) = 1;
4780 clobber_stmt = gimple_build_assign (*varp, clobber);
4781 gimple_set_location (clobber_stmt, gimple_location (stmt));
4782 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4783 }
4784 }
4785
4786 /* Reset the escaped solution. */
4787 if (cfun->gimple_df)
4788 pt_solution_reset (&cfun->gimple_df->escaped);
4789
4790 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4791 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4792 {
4793 size_t nargs = gimple_call_num_args (simtenter_stmt);
4794 vec<tree> *vars = id->dst_simt_vars;
4795 auto_vec<tree> newargs (nargs + vars->length ());
4796 for (size_t i = 0; i < nargs; i++)
4797 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4798 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4799 {
4800 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4801 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4802 }
4803 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4804 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4805 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4806 gsi_replace (&gsi, g, false);
4807 }
4808 vec_free (id->dst_simt_vars);
4809 id->dst_simt_vars = simtvars_save;
4810
4811 /* Clean up. */
4812 if (id->debug_map)
4813 {
4814 delete id->debug_map;
4815 id->debug_map = dst;
4816 }
4817 delete id->decl_map;
4818 id->decl_map = st;
4819
4820   /* Unlink the call's virtual operands before replacing it.  */
4821 unlink_stmt_vdef (stmt);
4822 if (gimple_vdef (stmt)
4823 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4824 release_ssa_name (gimple_vdef (stmt));
4825
4826 /* If the inlined function returns a result that we care about,
4827 substitute the GIMPLE_CALL with an assignment of the return
4828 variable to the LHS of the call. That is, if STMT was
4829 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4830 if (use_retvar && gimple_call_lhs (stmt))
4831 {
4832 gimple *old_stmt = stmt;
4833 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4834 gimple_set_location (stmt, gimple_location (old_stmt));
4835 gsi_replace (&stmt_gsi, stmt, false);
4836 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4837 /* Append a clobber for id->retvar if easily possible. */
4838 if (flag_stack_reuse != SR_NONE
4839 && id->retvar
4840 && VAR_P (id->retvar)
4841 && id->retvar != return_slot
4842 && id->retvar != modify_dest
4843 && !TREE_THIS_VOLATILE (id->retvar)
4844 && !is_gimple_reg (id->retvar)
4845 && !stmt_ends_bb_p (stmt))
4846 {
4847 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4848 gimple *clobber_stmt;
4849 TREE_THIS_VOLATILE (clobber) = 1;
4850 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4851 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4852 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4853 }
4854 }
4855 else
4856 {
4857 /* Handle the case of inlining a function with no return
4858 statement, which causes the return value to become undefined. */
4859 if (gimple_call_lhs (stmt)
4860 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4861 {
4862 tree name = gimple_call_lhs (stmt);
4863 tree var = SSA_NAME_VAR (name);
4864 tree def = var ? ssa_default_def (cfun, var) : NULL;
4865
4866 if (def)
4867 {
4868 /* If the variable is used undefined, make this name
4869 undefined via a move. */
4870 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4871 gsi_replace (&stmt_gsi, stmt, true);
4872 }
4873 else
4874 {
4875 if (!var)
4876 {
4877 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4878 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4879 }
4880 /* Otherwise make this variable undefined. */
4881 gsi_remove (&stmt_gsi, true);
4882 set_ssa_default_def (cfun, var, name);
4883 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4884 }
4885 }
4886 /* Replace with a clobber for id->retvar. */
4887 else if (flag_stack_reuse != SR_NONE
4888 && id->retvar
4889 && VAR_P (id->retvar)
4890 && id->retvar != return_slot
4891 && id->retvar != modify_dest
4892 && !TREE_THIS_VOLATILE (id->retvar)
4893 && !is_gimple_reg (id->retvar))
4894 {
4895 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4896 gimple *clobber_stmt;
4897 TREE_THIS_VOLATILE (clobber) = 1;
4898 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4899 gimple_set_location (clobber_stmt, gimple_location (stmt));
4900 gsi_replace (&stmt_gsi, clobber_stmt, false);
4901 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4902 }
4903 else
4904 gsi_remove (&stmt_gsi, true);
4905 }
4906
4907 if (purge_dead_abnormal_edges)
4908 {
4909 gimple_purge_dead_eh_edges (return_block);
4910 gimple_purge_dead_abnormal_call_edges (return_block);
4911 }
4912
4913 /* If the value of the new expression is ignored, that's OK. We
4914 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4915 the equivalent inlined version either. */
4916 if (is_gimple_assign (stmt))
4917 {
4918 gcc_assert (gimple_assign_single_p (stmt)
4919 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4920 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4921 }
4922
4923 id->add_clobbers_to_eh_landing_pads = 0;
4924
4925 /* Output the inlining info for this abstract function, since it has been
4926 inlined. If we don't do this now, we can lose the information about the
4927 variables in the function when the blocks get blown away as soon as we
4928 remove the cgraph node. */
4929 if (gimple_block (stmt))
4930 (*debug_hooks->outlining_inline_function) (fn);
4931
4932 /* Update callgraph if needed. */
4933 cg_edge->callee->remove ();
4934
4935 id->block = NULL_TREE;
4936 id->retvar = NULL_TREE;
4937 successfully_inlined = true;
4938
4939 egress:
4940 input_location = saved_location;
4941 return successfully_inlined;
4942 }
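/* Overall shape of the transformation performed above (illustrative GIMPLE;
   <retval> stands for whatever declare_return_variable picked):

     before:
       a_1 = foo (x_2(D));

     after:
       <argument setup from initialize_inlined_parameters>
       <remapped copy of foo's body, returns redirected to the join block>
       a_1 = <retval>;   // or a clobber / default-def move when foo does
                         //   not provide a usable return value

   with foo's EH regions appended to the caller's, its locals added to the
   caller, and the callgraph updated for the removed call.  */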
4943
4944 /* Expand call statements reachable from STMT_P.
4945 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4946 in a MODIFY_EXPR. */
4947
4948 static bool
4949 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4950 {
4951 gimple_stmt_iterator gsi;
4952 bool inlined = false;
4953
4954 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4955 {
4956 gimple *stmt = gsi_stmt (gsi);
4957 gsi_prev (&gsi);
4958
4959 if (is_gimple_call (stmt)
4960 && !gimple_call_internal_p (stmt))
4961 inlined |= expand_call_inline (bb, stmt, id);
4962 }
4963
4964 return inlined;
4965 }
4966
4967
4968 /* Walk all basic blocks created after FIRST and try to fold every statement
4969 in the STATEMENTS pointer set. */
4970
4971 static void
4972 fold_marked_statements (int first, hash_set<gimple *> *statements)
4973 {
4974 for (; first < last_basic_block_for_fn (cfun); first++)
4975 if (BASIC_BLOCK_FOR_FN (cfun, first))
4976 {
4977 gimple_stmt_iterator gsi;
4978
4979 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4980 !gsi_end_p (gsi);
4981 gsi_next (&gsi))
4982 if (statements->contains (gsi_stmt (gsi)))
4983 {
4984 gimple *old_stmt = gsi_stmt (gsi);
4985 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4986
4987 if (old_decl && fndecl_built_in_p (old_decl))
4988 {
4989 	    /* Folding builtins can create multiple instructions;
4990 	       we need to look at all of them.  */
4991 gimple_stmt_iterator i2 = gsi;
4992 gsi_prev (&i2);
4993 if (fold_stmt (&gsi))
4994 {
4995 gimple *new_stmt;
4996 /* If a builtin at the end of a bb folded into nothing,
4997 the following loop won't work. */
4998 if (gsi_end_p (gsi))
4999 {
5000 cgraph_update_edges_for_call_stmt (old_stmt,
5001 old_decl, NULL);
5002 break;
5003 }
5004 if (gsi_end_p (i2))
5005 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5006 else
5007 gsi_next (&i2);
5008 while (1)
5009 {
5010 new_stmt = gsi_stmt (i2);
5011 update_stmt (new_stmt);
5012 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5013 new_stmt);
5014
5015 if (new_stmt == gsi_stmt (gsi))
5016 {
5017 		      /* It is okay to check only the very last
5018 			 of these statements.  If it is a throwing
5019 			 statement nothing will change.  If it isn't,
5020 			 this can remove EH edges.  The only way that
5021 			 could be wrong is if some intermediate stmts
5022 			 throw but the last one doesn't.  That would mean
5023 			 we'd have to split the block, which we can't
5024 			 do here and we'd lose anyway.  And as builtins
5025 			 probably never throw, this all
5026 			 is moot anyway.  */
5027 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5028 new_stmt))
5029 gimple_purge_dead_eh_edges (
5030 BASIC_BLOCK_FOR_FN (cfun, first));
5031 break;
5032 }
5033 gsi_next (&i2);
5034 }
5035 }
5036 }
5037 else if (fold_stmt (&gsi))
5038 {
5039 /* Re-read the statement from GSI as fold_stmt() may
5040 have changed it. */
5041 gimple *new_stmt = gsi_stmt (gsi);
5042 update_stmt (new_stmt);
5043
5044 if (is_gimple_call (old_stmt)
5045 || is_gimple_call (new_stmt))
5046 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5047 new_stmt);
5048
5049 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5050 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5051 first));
5052 }
5053 }
5054 }
5055 }
5056
5057 /* Expand calls to inline functions in the body of FN. */
5058
5059 unsigned int
5060 optimize_inline_calls (tree fn)
5061 {
5062 copy_body_data id;
5063 basic_block bb;
5064 int last = n_basic_blocks_for_fn (cfun);
5065 bool inlined_p = false;
5066
5067 /* Clear out ID. */
5068 memset (&id, 0, sizeof (id));
5069
5070 id.src_node = id.dst_node = cgraph_node::get (fn);
5071 gcc_assert (id.dst_node->definition);
5072 id.dst_fn = fn;
5073 /* Or any functions that aren't finished yet. */
5074 if (current_function_decl)
5075 id.dst_fn = current_function_decl;
5076
5077 id.copy_decl = copy_decl_maybe_to_var;
5078 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5079 id.transform_new_cfg = false;
5080 id.transform_return_to_modify = true;
5081 id.transform_parameter = true;
5082 id.transform_lang_insert_block = NULL;
5083 id.statements_to_fold = new hash_set<gimple *>;
5084
5085 push_gimplify_context ();
5086
5087 /* We make no attempts to keep dominance info up-to-date. */
5088 free_dominance_info (CDI_DOMINATORS);
5089 free_dominance_info (CDI_POST_DOMINATORS);
5090
5091 /* Register specific gimple functions. */
5092 gimple_register_cfg_hooks ();
5093
5094 /* Reach the trees by walking over the CFG, and note the
5095 enclosing basic-blocks in the call edges. */
5096 /* We walk the blocks going forward, because inlined function bodies
5097 will split id->current_basic_block, and the new blocks will
5098 follow it; we'll trudge through them, processing their CALL_EXPRs
5099 along the way. */
5100 FOR_EACH_BB_FN (bb, cfun)
5101 inlined_p |= gimple_expand_calls_inline (bb, &id);
5102
5103 pop_gimplify_context (NULL);
5104
5105 if (flag_checking)
5106 {
5107 struct cgraph_edge *e;
5108
5109 id.dst_node->verify ();
5110
5111 /* Double check that we inlined everything we are supposed to inline. */
5112 for (e = id.dst_node->callees; e; e = e->next_callee)
5113 gcc_assert (e->inline_failed);
5114 }
5115
5116 /* Fold queued statements. */
5117 update_max_bb_count ();
5118 fold_marked_statements (last, id.statements_to_fold);
5119 delete id.statements_to_fold;
5120
5121 gcc_assert (!id.debug_stmts.exists ());
5122
5123 /* If we didn't inline into the function there is nothing to do. */
5124 if (!inlined_p)
5125 return 0;
5126
5127 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5128 number_blocks (fn);
5129
5130 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5131
5132 if (flag_checking)
5133 id.dst_node->verify ();
5134
5135   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5136      not possible yet - the IPA passes might make various functions no longer
5137      throw and they don't care to proactively update local EH info.  This is
5138      done later in the fixup_cfg pass, which also executes the verification.  */
5139 return (TODO_update_ssa
5140 | TODO_cleanup_cfg
5141 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5142 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5143 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5144 ? TODO_rebuild_frequencies : 0));
5145 }
5146
5147 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5148
5149 tree
5150 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5151 {
5152 enum tree_code code = TREE_CODE (*tp);
5153 enum tree_code_class cl = TREE_CODE_CLASS (code);
5154
5155 /* We make copies of most nodes. */
5156 if (IS_EXPR_CODE_CLASS (cl)
5157 || code == TREE_LIST
5158 || code == TREE_VEC
5159 || code == TYPE_DECL
5160 || code == OMP_CLAUSE)
5161 {
5162 /* Because the chain gets clobbered when we make a copy, we save it
5163 here. */
5164 tree chain = NULL_TREE, new_tree;
5165
5166 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5167 chain = TREE_CHAIN (*tp);
5168
5169 /* Copy the node. */
5170 new_tree = copy_node (*tp);
5171
5172 *tp = new_tree;
5173
5174 /* Now, restore the chain, if appropriate. That will cause
5175 walk_tree to walk into the chain as well. */
5176 if (code == PARM_DECL
5177 || code == TREE_LIST
5178 || code == OMP_CLAUSE)
5179 TREE_CHAIN (*tp) = chain;
5180
5181 /* For now, we don't update BLOCKs when we make copies. So, we
5182 have to nullify all BIND_EXPRs. */
5183 if (TREE_CODE (*tp) == BIND_EXPR)
5184 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5185 }
5186 else if (code == CONSTRUCTOR)
5187 {
5188 /* CONSTRUCTOR nodes need special handling because
5189 we need to duplicate the vector of elements. */
5190 tree new_tree;
5191
5192 new_tree = copy_node (*tp);
5193 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5194 *tp = new_tree;
5195 }
5196 else if (code == STATEMENT_LIST)
5197 /* We used to just abort on STATEMENT_LIST, but we can run into them
5198 with statement-expressions (c++/40975). */
5199 copy_statement_list (tp);
5200 else if (TREE_CODE_CLASS (code) == tcc_type)
5201 *walk_subtrees = 0;
5202 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5203 *walk_subtrees = 0;
5204 else if (TREE_CODE_CLASS (code) == tcc_constant)
5205 *walk_subtrees = 0;
5206 return NULL_TREE;
5207 }
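
/* Editorial illustration, not part of GCC proper: the usual way copy_tree_r
   is used is as the callback of walk_tree, which turns a shared GENERIC
   expression into an unshared deep copy.  The helper name
   example_deep_copy_expr below is made up for this sketch.  */

static tree ATTRIBUTE_UNUSED
example_deep_copy_expr (tree expr)
{
  /* copy_tree_r replaces each visited node through *TP with a fresh copy,
     so after the walk EXPR points at an unshared copy of the original.  */
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}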
5208
5209 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5210 information indicating to what new SAVE_EXPR this one should be mapped,
5211 use that one. Otherwise, create a new node, enter it in ST, and record
5212 it so later encounters of the same SAVE_EXPR map to the same copy. */
5213
5214 static void
5215 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5216 {
5217 tree *n;
5218 tree t;
5219
5220 /* See if we already encountered this SAVE_EXPR. */
5221 n = st->get (*tp);
5222
5223 /* If we didn't already remap this SAVE_EXPR, do so now. */
5224 if (!n)
5225 {
5226 t = copy_node (*tp);
5227
5228 /* Remember this SAVE_EXPR. */
5229 st->put (*tp, t);
5230 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5231 st->put (t, t);
5232 }
5233 else
5234 {
5235 /* We've already walked into this SAVE_EXPR; don't do it again. */
5236 *walk_subtrees = 0;
5237 t = *n;
5238 }
5239
5240 /* Replace this SAVE_EXPR with the copy. */
5241 *tp = t;
5242 }
5243
5244 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5245 label, copies the declaration and enters it in the decl map of WI->info
5246 (which is really a 'copy_body_data *'). */
5247
5248 static tree
5249 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5250 bool *handled_ops_p ATTRIBUTE_UNUSED,
5251 struct walk_stmt_info *wi)
5252 {
5253 copy_body_data *id = (copy_body_data *) wi->info;
5254 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5255
5256 if (stmt)
5257 {
5258 tree decl = gimple_label_label (stmt);
5259
5260 /* Copy the decl and remember the copy. */
5261 insert_decl_map (id, decl, id->copy_decl (decl, id));
5262 }
5263
5264 return NULL_TREE;
5265 }
5266
5267 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5268 struct walk_stmt_info *wi);
5269
5270 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5271 Using the decl map held in the copy_body_data passed through WI->info,
5272 remaps all local declarations to appropriate replacements in gimple
5273 operands. */
5274
5275 static tree
5276 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5277 {
5278 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5279 copy_body_data *id = (copy_body_data *) wi->info;
5280 hash_map<tree, tree> *st = id->decl_map;
5281 tree *n;
5282 tree expr = *tp;
5283
5284 /* For recursive invocations this is no longer the LHS itself. */
5285 bool is_lhs = wi->is_lhs;
5286 wi->is_lhs = false;
5287
5288 if (TREE_CODE (expr) == SSA_NAME)
5289 {
5290 *tp = remap_ssa_name (*tp, id);
5291 *walk_subtrees = 0;
5292 if (is_lhs)
5293 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5294 }
5295 /* Only a local declaration (variable or label). */
5296 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5297 || TREE_CODE (expr) == LABEL_DECL)
5298 {
5299 /* Lookup the declaration. */
5300 n = st->get (expr);
5301
5302 /* If it's there, remap it. */
5303 if (n)
5304 *tp = *n;
5305 *walk_subtrees = 0;
5306 }
5307 else if (TREE_CODE (expr) == STATEMENT_LIST
5308 || TREE_CODE (expr) == BIND_EXPR
5309 || TREE_CODE (expr) == SAVE_EXPR)
5310 gcc_unreachable ();
5311 else if (TREE_CODE (expr) == TARGET_EXPR)
5312 {
5313 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5314 It's OK for this to happen if it was part of a subtree that
5315 isn't immediately expanded, such as operand 2 of another
5316 TARGET_EXPR. */
5317 if (!TREE_OPERAND (expr, 1))
5318 {
5319 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5320 TREE_OPERAND (expr, 3) = NULL_TREE;
5321 }
5322 }
5323 else if (TREE_CODE (expr) == OMP_CLAUSE)
5324 {
5325 /* Before the omplower pass completes, some OMP clauses can contain
5326 sequences that are neither copied by gimple_seq_copy nor walked by
5327 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5328 in those situations, we have to copy and process them explicitly. */
5329
5330 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5331 {
5332 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5333 seq = duplicate_remap_omp_clause_seq (seq, wi);
5334 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5335 }
5336 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5337 {
5338 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5339 seq = duplicate_remap_omp_clause_seq (seq, wi);
5340 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5341 }
5342 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5343 {
5344 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5345 seq = duplicate_remap_omp_clause_seq (seq, wi);
5346 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5347 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5348 seq = duplicate_remap_omp_clause_seq (seq, wi);
5349 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5350 }
5351 }
5352
5353 /* Keep iterating. */
5354 return NULL_TREE;
5355 }
5356
5357
5358 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5359 Using the decl map held in the copy_body_data passed through WI->info,
5360 remaps all local declarations to appropriate replacements in gimple
5361 statements. */
5362
5363 static tree
5364 replace_locals_stmt (gimple_stmt_iterator *gsip,
5365 bool *handled_ops_p ATTRIBUTE_UNUSED,
5366 struct walk_stmt_info *wi)
5367 {
5368 copy_body_data *id = (copy_body_data *) wi->info;
5369 gimple *gs = gsi_stmt (*gsip);
5370
5371 if (gbind *stmt = dyn_cast <gbind *> (gs))
5372 {
5373 tree block = gimple_bind_block (stmt);
5374
5375 if (block)
5376 {
5377 remap_block (&block, id);
5378 gimple_bind_set_block (stmt, block);
5379 }
5380
5381 /* This will remap a lot of the same decls again, but this should be
5382 harmless. */
5383 if (gimple_bind_vars (stmt))
5384 {
5385 tree old_var, decls = gimple_bind_vars (stmt);
5386
5387 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5388 if (!can_be_nonlocal (old_var, id)
5389 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5390 remap_decl (old_var, id);
5391
5392 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5393 id->prevent_decl_creation_for_types = true;
5394 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5395 id->prevent_decl_creation_for_types = false;
5396 }
5397 }
5398
5399 /* Keep iterating. */
5400 return NULL_TREE;
5401 }
5402
5403 /* Create a copy of SEQ and remap all decls in it. */
5404
5405 static gimple_seq
5406 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5407 {
5408 if (!seq)
5409 return NULL;
5410
5411 /* Any labels in OMP sequences can only be referred to from within the
5412 sequence itself, so we can do both the label marking and the remapping here. */
5413 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5414 gimple_seq copy = gimple_seq_copy (seq);
5415 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5416 return copy;
5417 }
5418
5419 /* Copies everything in SEQ and replaces variables and labels local to
5420 current_function_decl. */
5421
5422 gimple_seq
5423 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5424 {
5425 copy_body_data id;
5426 struct walk_stmt_info wi;
5427 gimple_seq copy;
5428
5429 /* There's nothing to do for NULL_TREE. */
5430 if (seq == NULL)
5431 return seq;
5432
5433 /* Set up ID. */
5434 memset (&id, 0, sizeof (id));
5435 id.src_fn = current_function_decl;
5436 id.dst_fn = current_function_decl;
5437 id.src_cfun = cfun;
5438 id.decl_map = new hash_map<tree, tree>;
5439 id.debug_map = NULL;
5440
5441 id.copy_decl = copy_decl_no_change;
5442 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5443 id.transform_new_cfg = false;
5444 id.transform_return_to_modify = false;
5445 id.transform_parameter = false;
5446 id.transform_lang_insert_block = NULL;
5447
5448 /* Walk the tree once to find local labels. */
5449 memset (&wi, 0, sizeof (wi));
5450 hash_set<tree> visited;
5451 wi.info = &id;
5452 wi.pset = &visited;
5453 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5454
5455 copy = gimple_seq_copy (seq);
5456
5457 /* Walk the copy, remapping decls. */
5458 memset (&wi, 0, sizeof (wi));
5459 wi.info = &id;
5460 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5461
5462 /* Clean up. */
5463 delete id.decl_map;
5464 if (id.debug_map)
5465 delete id.debug_map;
5466 if (id.dependence_map)
5467 {
5468 delete id.dependence_map;
5469 id.dependence_map = NULL;
5470 }
5471
5472 return copy;
5473 }
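
/* Editorial illustration, not part of GCC proper: a minimal sketch of how a
   caller (for instance OMP lowering code that needs a second copy of a
   clause body) might use copy_gimple_seq_and_replace_locals; the helper
   name is hypothetical.  */

static gimple_seq ATTRIBUTE_UNUSED
example_duplicate_local_seq (gimple_seq seq)
{
  /* The returned sequence shares no local VAR_DECLs, LABEL_DECLs or SSA
     names with SEQ; references to non-local or static entities are kept.  */
  return copy_gimple_seq_and_replace_locals (seq);
}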
5474
5475
5476 /* Allow someone to determine from gdb whether SEARCH occurs anywhere within TOP. */
5477
5478 static tree
5479 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5480 {
5481 if (*tp == data)
5482 return (tree) data;
5483 else
5484 return NULL;
5485 }
5486
5487 DEBUG_FUNCTION bool
5488 debug_find_tree (tree top, tree search)
5489 {
5490 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5491 }
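
/* Editorial usage note, not part of GCC proper: from a debugging session one
   might check whether a decl still occurs somewhere in a function body with
   something like
     (gdb) call debug_find_tree (DECL_SAVED_TREE (fndecl), suspect_decl)
   where fndecl and suspect_decl stand for whatever trees are at hand.  */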
5492
5493
5494 /* Declare the variables created by the inliner. Add all the variables in
5495 VARS to BLOCK and to the current function's local decls. */
5496
5497 static void
5498 declare_inline_vars (tree block, tree vars)
5499 {
5500 tree t;
5501 for (t = vars; t; t = DECL_CHAIN (t))
5502 {
5503 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5504 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5505 add_local_decl (cfun, t);
5506 }
5507
5508 if (block)
5509 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5510 }
5511
5512 /* Finish up COPY, a copy of DECL made for duplication. DECL originally
5513 lived in ID->src_fn; COPY will now be placed in ID->dst_fn. Set up its
5514 debug info, context and related bookkeeping, and return it. */
5515
5516 tree
5517 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5518 {
5519 /* Don't generate debug information for the copy if we wouldn't have
5520 generated it for the original either. */
5521 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5522 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5523
5524 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5525 declaration inspired this copy. */
5526 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5527
5528 /* The new variable/label has no RTL, yet. */
5529 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5530 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5531 SET_DECL_RTL (copy, 0);
5532 /* For vector typed decls make sure to update DECL_MODE according
5533 to the new function context. */
5534 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5535 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5536
5537 /* These args would always appear unused, if not for this. */
5538 TREE_USED (copy) = 1;
5539
5540 /* Set the context for the new declaration. */
5541 if (!DECL_CONTEXT (decl))
5542 /* Globals stay global. */
5543 ;
5544 else if (DECL_CONTEXT (decl) != id->src_fn)
5545 /* Things that weren't in the scope of the function we're inlining
5546 from aren't in the scope we're inlining to, either. */
5547 ;
5548 else if (TREE_STATIC (decl))
5549 /* Function-scoped static variables should stay in the original
5550 function. */
5551 ;
5552 else
5553 {
5554 /* Ordinary automatic local variables are now in the scope of the
5555 new function. */
5556 DECL_CONTEXT (copy) = id->dst_fn;
5557 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5558 {
5559 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5560 DECL_ATTRIBUTES (copy)
5561 = tree_cons (get_identifier ("omp simt private"), NULL,
5562 DECL_ATTRIBUTES (copy));
5563 id->dst_simt_vars->safe_push (copy);
5564 }
5565 }
5566
5567 return copy;
5568 }
5569
5570 static tree
5571 copy_decl_to_var (tree decl, copy_body_data *id)
5572 {
5573 tree copy, type;
5574
5575 gcc_assert (TREE_CODE (decl) == PARM_DECL
5576 || TREE_CODE (decl) == RESULT_DECL);
5577
5578 type = TREE_TYPE (decl);
5579
5580 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5581 VAR_DECL, DECL_NAME (decl), type);
5582 if (DECL_PT_UID_SET_P (decl))
5583 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5584 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5585 TREE_READONLY (copy) = TREE_READONLY (decl);
5586 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5587 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5588
5589 return copy_decl_for_dup_finish (id, decl, copy);
5590 }
5591
5592 /* Like copy_decl_to_var, but create a return slot object instead of a
5593 pointer variable for return by invisible reference. */
5594
5595 static tree
5596 copy_result_decl_to_var (tree decl, copy_body_data *id)
5597 {
5598 tree copy, type;
5599
5600 gcc_assert (TREE_CODE (decl) == PARM_DECL
5601 || TREE_CODE (decl) == RESULT_DECL);
5602
5603 type = TREE_TYPE (decl);
5604 if (DECL_BY_REFERENCE (decl))
5605 type = TREE_TYPE (type);
5606
5607 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5608 VAR_DECL, DECL_NAME (decl), type);
5609 if (DECL_PT_UID_SET_P (decl))
5610 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5611 TREE_READONLY (copy) = TREE_READONLY (decl);
5612 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5613 if (!DECL_BY_REFERENCE (decl))
5614 {
5615 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5616 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5617 }
5618
5619 return copy_decl_for_dup_finish (id, decl, copy);
5620 }
5621
5622 tree
5623 copy_decl_no_change (tree decl, copy_body_data *id)
5624 {
5625 tree copy;
5626
5627 copy = copy_node (decl);
5628
5629 /* The COPY is not abstract; it will be generated in DST_FN. */
5630 DECL_ABSTRACT_P (copy) = false;
5631 lang_hooks.dup_lang_specific_decl (copy);
5632
5633 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5634 been taken; it's for internal bookkeeping in expand_goto_internal. */
5635 if (TREE_CODE (copy) == LABEL_DECL)
5636 {
5637 TREE_ADDRESSABLE (copy) = 0;
5638 LABEL_DECL_UID (copy) = -1;
5639 }
5640
5641 return copy_decl_for_dup_finish (id, decl, copy);
5642 }
5643
5644 static tree
5645 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5646 {
5647 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5648 return copy_decl_to_var (decl, id);
5649 else
5650 return copy_decl_no_change (decl, id);
5651 }
5652
5653 /* Return a copy of the function's argument list, omitting arguments listed in ARGS_TO_SKIP. */
5654 static tree
5655 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5656 bitmap args_to_skip, tree *vars)
5657 {
5658 tree arg, *parg;
5659 tree new_parm = NULL;
5660 int i = 0;
5661
5662 parg = &new_parm;
5663
5664 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5665 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5666 {
5667 tree new_tree = remap_decl (arg, id);
5668 if (TREE_CODE (new_tree) != PARM_DECL)
5669 new_tree = id->copy_decl (arg, id);
5670 lang_hooks.dup_lang_specific_decl (new_tree);
5671 *parg = new_tree;
5672 parg = &DECL_CHAIN (new_tree);
5673 }
5674 else if (!id->decl_map->get (arg))
5675 {
5676 /* Make an equivalent VAR_DECL. If the argument was used
5677 as a temporary variable later in the function, its uses will be
5678 replaced by the local variable. */
5679 tree var = copy_decl_to_var (arg, id);
5680 insert_decl_map (id, arg, var);
5681 /* Declare this new variable. */
5682 DECL_CHAIN (var) = *vars;
5683 *vars = var;
5684 }
5685 return new_parm;
5686 }
5687
5688 /* Return a copy of the function's static chain. */
5689 static tree
5690 copy_static_chain (tree static_chain, copy_body_data * id)
5691 {
5692 tree *chain_copy, *pvar;
5693
5694 chain_copy = &static_chain;
5695 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5696 {
5697 tree new_tree = remap_decl (*pvar, id);
5698 lang_hooks.dup_lang_specific_decl (new_tree);
5699 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5700 *pvar = new_tree;
5701 }
5702 return static_chain;
5703 }
5704
5705 /* Return true if the function is allowed to be versioned.
5706 This is a guard for the versioning functionality. */
5707
5708 bool
5709 tree_versionable_function_p (tree fndecl)
5710 {
5711 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5712 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5713 }
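
/* Editorial illustration, not part of GCC proper: IPA cloning code is
   expected to check versionability before creating a clone; a sketch with a
   hypothetical helper name follows.  */

static bool ATTRIBUTE_UNUSED
example_can_make_clone_p (struct cgraph_node *node)
{
  /* A "noclone" attribute or any copy_forbidden reason makes the function
     ineligible for versioning.  */
  return tree_versionable_function_p (node->decl);
}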
5714
5715 /* Update clone info after duplication. */
5716
5717 static void
5718 update_clone_info (copy_body_data * id)
5719 {
5720 struct cgraph_node *node;
5721 if (!id->dst_node->clones)
5722 return;
5723 for (node = id->dst_node->clones; node != id->dst_node;)
5724 {
5725 /* First update replace maps to match the new body. */
5726 if (node->clone.tree_map)
5727 {
5728 unsigned int i;
5729 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5730 {
5731 struct ipa_replace_map *replace_info;
5732 replace_info = (*node->clone.tree_map)[i];
5733 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5734 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5735 }
5736 }
5737 if (node->clones)
5738 node = node->clones;
5739 else if (node->next_sibling_clone)
5740 node = node->next_sibling_clone;
5741 else
5742 {
5743 while (node != id->dst_node && !node->next_sibling_clone)
5744 node = node->clone_of;
5745 if (node != id->dst_node)
5746 node = node->next_sibling_clone;
5747 }
5748 }
5749 }
5750
5751 /* Create a copy of a function's tree.
5752 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5753 of the original function and the new copied function
5754 respectively. In case we want to replace a DECL
5755 tree with another tree while duplicating the function's
5756 body, TREE_MAP represents the mapping between these
5757 trees. If UPDATE_CLONES is set, the call_stmt fields
5758 of edges of clones of the function will be updated.
5759
5760 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5761 from the new version.
5762 If SKIP_RETURN is true, the new version will return void.
5763 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5764 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5765 */
5766 void
5767 tree_function_versioning (tree old_decl, tree new_decl,
5768 vec<ipa_replace_map *, va_gc> *tree_map,
5769 bool update_clones, bitmap args_to_skip,
5770 bool skip_return, bitmap blocks_to_copy,
5771 basic_block new_entry)
5772 {
5773 struct cgraph_node *old_version_node;
5774 struct cgraph_node *new_version_node;
5775 copy_body_data id;
5776 tree p;
5777 unsigned i;
5778 struct ipa_replace_map *replace_info;
5779 basic_block old_entry_block, bb;
5780 auto_vec<gimple *, 10> init_stmts;
5781 tree vars = NULL_TREE;
5782 bitmap debug_args_to_skip = args_to_skip;
5783
5784 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5785 && TREE_CODE (new_decl) == FUNCTION_DECL);
5786 DECL_POSSIBLY_INLINED (old_decl) = 1;
5787
5788 old_version_node = cgraph_node::get (old_decl);
5789 gcc_checking_assert (old_version_node);
5790 new_version_node = cgraph_node::get (new_decl);
5791 gcc_checking_assert (new_version_node);
5792
5793 /* Copy over debug args. */
5794 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5795 {
5796 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5797 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5798 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5799 old_debug_args = decl_debug_args_lookup (old_decl);
5800 if (old_debug_args)
5801 {
5802 new_debug_args = decl_debug_args_insert (new_decl);
5803 *new_debug_args = vec_safe_copy (*old_debug_args);
5804 }
5805 }
5806
5807 /* Output the inlining info for this abstract function, since it has been
5808 inlined. If we don't do this now, we can lose the information about the
5809 variables in the function when the blocks get blown away as soon as we
5810 remove the cgraph node. */
5811 (*debug_hooks->outlining_inline_function) (old_decl);
5812
5813 DECL_ARTIFICIAL (new_decl) = 1;
5814 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5815 if (DECL_ORIGIN (old_decl) == old_decl)
5816 old_version_node->used_as_abstract_origin = true;
5817 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5818
5819 /* Prepare the data structures for the tree copy. */
5820 memset (&id, 0, sizeof (id));
5821
5822 /* Generate a new name for the new version. */
5823 id.statements_to_fold = new hash_set<gimple *>;
5824
5825 id.decl_map = new hash_map<tree, tree>;
5826 id.debug_map = NULL;
5827 id.src_fn = old_decl;
5828 id.dst_fn = new_decl;
5829 id.src_node = old_version_node;
5830 id.dst_node = new_version_node;
5831 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5832 id.blocks_to_copy = blocks_to_copy;
5833
5834 id.copy_decl = copy_decl_no_change;
5835 id.transform_call_graph_edges
5836 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5837 id.transform_new_cfg = true;
5838 id.transform_return_to_modify = false;
5839 id.transform_parameter = false;
5840 id.transform_lang_insert_block = NULL;
5841
5842 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5843 (DECL_STRUCT_FUNCTION (old_decl));
5844 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5845 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5846 initialize_cfun (new_decl, old_decl,
5847 new_entry ? new_entry->count : old_entry_block->count);
5848 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5849 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5850 = id.src_cfun->gimple_df->ipa_pta;
5851
5852 /* Copy the function's static chain. */
5853 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5854 if (p)
5855 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5856 = copy_static_chain (p, &id);
5857
5858 /* If there's a tree_map, prepare for substitution. */
5859 if (tree_map)
5860 for (i = 0; i < tree_map->length (); i++)
5861 {
5862 gimple *init;
5863 replace_info = (*tree_map)[i];
5864 if (replace_info->replace_p)
5865 {
5866 int parm_num = -1;
5867 if (!replace_info->old_tree)
5868 {
5869 int p = replace_info->parm_num;
5870 tree parm;
5871 tree req_type, new_type;
5872
5873 for (parm = DECL_ARGUMENTS (old_decl); p;
5874 parm = DECL_CHAIN (parm))
5875 p--;
5876 replace_info->old_tree = parm;
5877 parm_num = replace_info->parm_num;
5878 req_type = TREE_TYPE (parm);
5879 new_type = TREE_TYPE (replace_info->new_tree);
5880 if (!useless_type_conversion_p (req_type, new_type))
5881 {
5882 if (fold_convertible_p (req_type, replace_info->new_tree))
5883 replace_info->new_tree
5884 = fold_build1 (NOP_EXPR, req_type,
5885 replace_info->new_tree);
5886 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5887 replace_info->new_tree
5888 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5889 replace_info->new_tree);
5890 else
5891 {
5892 if (dump_file)
5893 {
5894 fprintf (dump_file, " const ");
5895 print_generic_expr (dump_file,
5896 replace_info->new_tree);
5897 fprintf (dump_file,
5898 " can't be converted to param ");
5899 print_generic_expr (dump_file, parm);
5900 fprintf (dump_file, "\n");
5901 }
5902 replace_info->old_tree = NULL;
5903 }
5904 }
5905 }
5906 else
5907 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5908 if (replace_info->old_tree)
5909 {
5910 init = setup_one_parameter (&id, replace_info->old_tree,
5911 replace_info->new_tree, id.src_fn,
5912 NULL,
5913 &vars);
5914 if (init)
5915 init_stmts.safe_push (init);
5916 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5917 {
5918 if (parm_num == -1)
5919 {
5920 tree parm;
5921 int p;
5922 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5923 parm = DECL_CHAIN (parm), p++)
5924 if (parm == replace_info->old_tree)
5925 {
5926 parm_num = p;
5927 break;
5928 }
5929 }
5930 if (parm_num != -1)
5931 {
5932 if (debug_args_to_skip == args_to_skip)
5933 {
5934 debug_args_to_skip = BITMAP_ALLOC (NULL);
5935 bitmap_copy (debug_args_to_skip, args_to_skip);
5936 }
5937 bitmap_clear_bit (debug_args_to_skip, parm_num);
5938 }
5939 }
5940 }
5941 }
5942 }
5943 /* Copy the function's arguments. */
5944 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5945 DECL_ARGUMENTS (new_decl)
5946 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5947 args_to_skip, &vars);
5948
5949 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5950 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5951
5952 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5953
5954 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5955 /* Add local vars. */
5956 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5957
5958 if (DECL_RESULT (old_decl) == NULL_TREE)
5959 ;
5960 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5961 {
5962 DECL_RESULT (new_decl)
5963 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5964 RESULT_DECL, NULL_TREE, void_type_node);
5965 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5966 cfun->returns_struct = 0;
5967 cfun->returns_pcc_struct = 0;
5968 }
5969 else
5970 {
5971 tree old_name;
5972 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5973 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5974 if (gimple_in_ssa_p (id.src_cfun)
5975 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5976 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5977 {
5978 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5979 insert_decl_map (&id, old_name, new_name);
5980 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5981 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5982 }
5983 }
5984
5985 /* Set up the destination function's loop tree. */
5986 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5987 {
5988 cfun->curr_properties &= ~PROP_loops;
5989 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5990 cfun->curr_properties |= PROP_loops;
5991 }
5992
5993 /* Copy the function's body. */
5994 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5995 new_entry);
5996
5997 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5998 number_blocks (new_decl);
5999
6000 /* We want to create the BB unconditionally, so that the addition of
6001 debug stmts doesn't affect BB count, which may in the end cause
6002 codegen differences. */
6003 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6004 while (init_stmts.length ())
6005 insert_init_stmt (&id, bb, init_stmts.pop ());
6006 update_clone_info (&id);
6007
6008 /* Remap the nonlocal_goto_save_area, if any. */
6009 if (cfun->nonlocal_goto_save_area)
6010 {
6011 struct walk_stmt_info wi;
6012
6013 memset (&wi, 0, sizeof (wi));
6014 wi.info = &id;
6015 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6016 }
6017
6018 /* Clean up. */
6019 delete id.decl_map;
6020 if (id.debug_map)
6021 delete id.debug_map;
6022 free_dominance_info (CDI_DOMINATORS);
6023 free_dominance_info (CDI_POST_DOMINATORS);
6024
6025 update_max_bb_count ();
6026 fold_marked_statements (0, id.statements_to_fold);
6027 delete id.statements_to_fold;
6028 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6029 if (id.dst_node->definition)
6030 cgraph_edge::rebuild_references ();
6031 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6032 {
6033 calculate_dominance_info (CDI_DOMINATORS);
6034 fix_loop_structure (NULL);
6035 }
6036 update_ssa (TODO_update_ssa);
6037
6038 /* After partial cloning we need to rescale frequencies, so they are
6039 within proper range in the cloned function. */
6040 if (new_entry)
6041 {
6042 struct cgraph_edge *e;
6043 rebuild_frequencies ();
6044
6045 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6046 for (e = new_version_node->callees; e; e = e->next_callee)
6047 {
6048 basic_block bb = gimple_bb (e->call_stmt);
6049 e->count = bb->count;
6050 }
6051 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6052 {
6053 basic_block bb = gimple_bb (e->call_stmt);
6054 e->count = bb->count;
6055 }
6056 }
6057
6058 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6059 {
6060 tree parm;
6061 vec<tree, va_gc> **debug_args = NULL;
6062 unsigned int len = 0;
6063 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6064 parm; parm = DECL_CHAIN (parm), i++)
6065 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6066 {
6067 tree ddecl;
6068
6069 if (debug_args == NULL)
6070 {
6071 debug_args = decl_debug_args_insert (new_decl);
6072 len = vec_safe_length (*debug_args);
6073 }
6074 ddecl = make_node (DEBUG_EXPR_DECL);
6075 DECL_ARTIFICIAL (ddecl) = 1;
6076 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6077 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6078 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6079 vec_safe_push (*debug_args, ddecl);
6080 }
6081 if (debug_args != NULL)
6082 {
6083 /* On the callee side, add
6084 DEBUG D#Y s=> parm
6085 DEBUG var => D#Y
6086 stmts to the first bb where var is a VAR_DECL created for the
6087 optimized away parameter in DECL_INITIAL block. This hints
6088 in the debug info that var (whose DECL_ORIGIN is the parm
6089 PARM_DECL) is optimized away, but could be looked up at the
6090 call site as value of D#X there. */
6091 tree var = vars, vexpr;
6092 gimple_stmt_iterator cgsi
6093 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6094 gimple *def_temp;
6095 var = vars;
6096 i = vec_safe_length (*debug_args);
6097 do
6098 {
6099 i -= 2;
6100 while (var != NULL_TREE
6101 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6102 var = TREE_CHAIN (var);
6103 if (var == NULL_TREE)
6104 break;
6105 vexpr = make_node (DEBUG_EXPR_DECL);
6106 parm = (**debug_args)[i];
6107 DECL_ARTIFICIAL (vexpr) = 1;
6108 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6109 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6110 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6111 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6112 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6113 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6114 }
6115 while (i > len);
6116 }
6117 }
6118
6119 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6120 BITMAP_FREE (debug_args_to_skip);
6121 free_dominance_info (CDI_DOMINATORS);
6122 free_dominance_info (CDI_POST_DOMINATORS);
6123
6124 gcc_assert (!id.debug_stmts.exists ());
6125 pop_cfun ();
6126 return;
6127 }
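
/* Editorial illustration, not part of GCC proper: a minimal sketch of a
   caller dropping the first parameter when producing a new version.  The
   helper name is hypothetical, and NEW_DECL is assumed to be a FUNCTION_DECL
   already prepared for the clone the way the real IPA callers prepare it.  */

static void ATTRIBUTE_UNUSED
example_version_without_first_param (tree old_decl, tree new_decl)
{
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  /* Bit N set means "drop parameter N from the new version".  */
  bitmap_set_bit (args_to_skip, 0);
  tree_function_versioning (old_decl, new_decl, /*tree_map=*/NULL,
			    /*update_clones=*/false, args_to_skip,
			    /*skip_return=*/false, /*blocks_to_copy=*/NULL,
			    /*new_entry=*/NULL);
  BITMAP_FREE (args_to_skip);
}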
6128
6129 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6130 the callee and return the inlined body on success. */
6131
6132 tree
6133 maybe_inline_call_in_expr (tree exp)
6134 {
6135 tree fn = get_callee_fndecl (exp);
6136
6137 /* We can only try to inline "const" functions. */
6138 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6139 {
6140 call_expr_arg_iterator iter;
6141 copy_body_data id;
6142 tree param, arg, t;
6143 hash_map<tree, tree> decl_map;
6144
6145 /* Remap the parameters. */
6146 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6147 param;
6148 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6149 decl_map.put (param, arg);
6150
6151 memset (&id, 0, sizeof (id));
6152 id.src_fn = fn;
6153 id.dst_fn = current_function_decl;
6154 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6155 id.decl_map = &decl_map;
6156
6157 id.copy_decl = copy_decl_no_change;
6158 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6159 id.transform_new_cfg = false;
6160 id.transform_return_to_modify = true;
6161 id.transform_parameter = true;
6162 id.transform_lang_insert_block = NULL;
6163
6164 /* Make sure not to unshare trees behind the front-end's back
6165 since front-end specific mechanisms may rely on sharing. */
6166 id.regimplify = false;
6167 id.do_not_unshare = true;
6168
6169 /* We're not inside any EH region. */
6170 id.eh_lp_nr = 0;
6171
6172 t = copy_tree_body (&id);
6173
6174 /* We can only return something suitable for use in a GENERIC
6175 expression tree. */
6176 if (TREE_CODE (t) == MODIFY_EXPR)
6177 return TREE_OPERAND (t, 1);
6178 }
6179
6180 return NULL_TREE;
6181 }
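
/* Editorial illustration, not part of GCC proper: a sketch of how a front
   end might try to fold a call during GENERIC folding; the helper name is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_try_fold_const_call (tree call)
{
  /* Only calls to "const" functions whose GENERIC body is still available
     can be expanded; otherwise keep the original CALL_EXPR.  */
  tree folded = maybe_inline_call_in_expr (call);
  return folded ? folded : call;
}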
6182
6183 /* Duplicate a type, fields and all. */
6184
6185 tree
6186 build_duplicate_type (tree type)
6187 {
6188 struct copy_body_data id;
6189
6190 memset (&id, 0, sizeof (id));
6191 id.src_fn = current_function_decl;
6192 id.dst_fn = current_function_decl;
6193 id.src_cfun = cfun;
6194 id.decl_map = new hash_map<tree, tree>;
6195 id.debug_map = NULL;
6196 id.copy_decl = copy_decl_no_change;
6197
6198 type = remap_type_1 (type, &id);
6199
6200 delete id.decl_map;
6201 if (id.debug_map)
6202 delete id.debug_map;
6203
6204 TYPE_CANONICAL (type) = type;
6205
6206 return type;
6207 }
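
/* Editorial illustration, not part of GCC proper: a sketch of requesting a
   distinct duplicate of a type; the helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_make_distinct_type (tree type)
{
  /* The duplicate is structurally identical to TYPE but is a separate node
     that serves as its own TYPE_CANONICAL.  */
  return build_duplicate_type (type);
}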
6208
6209 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6210 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6211 evaluation. */
6212
6213 tree
6214 copy_fn (tree fn, tree& parms, tree& result)
6215 {
6216 copy_body_data id;
6217 tree param;
6218 hash_map<tree, tree> decl_map;
6219
6220 tree *p = &parms;
6221 *p = NULL_TREE;
6222
6223 memset (&id, 0, sizeof (id));
6224 id.src_fn = fn;
6225 id.dst_fn = current_function_decl;
6226 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6227 id.decl_map = &decl_map;
6228
6229 id.copy_decl = copy_decl_no_change;
6230 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6231 id.transform_new_cfg = false;
6232 id.transform_return_to_modify = false;
6233 id.transform_parameter = true;
6234 id.transform_lang_insert_block = NULL;
6235
6236 /* Make sure not to unshare trees behind the front-end's back
6237 since front-end specific mechanisms may rely on sharing. */
6238 id.regimplify = false;
6239 id.do_not_unshare = true;
6240
6241 /* We're not inside any EH region. */
6242 id.eh_lp_nr = 0;
6243
6244 /* Remap the parameters and result and return them to the caller. */
6245 for (param = DECL_ARGUMENTS (fn);
6246 param;
6247 param = DECL_CHAIN (param))
6248 {
6249 *p = remap_decl (param, &id);
6250 p = &DECL_CHAIN (*p);
6251 }
6252
6253 if (DECL_RESULT (fn))
6254 result = remap_decl (DECL_RESULT (fn), &id);
6255 else
6256 result = NULL_TREE;
6257
6258 return copy_tree_body (&id);
6259 }