gcc/ipa-param-manipulation.c
1 /* Manipulation of formal and actual parameters of functions and function
2 calls.
3 Copyright (C) 2017-2020 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "ssa.h"
28 #include "cgraph.h"
29 #include "fold-const.h"
30 #include "tree-eh.h"
31 #include "stor-layout.h"
32 #include "gimplify.h"
33 #include "gimple-iterator.h"
34 #include "gimplify-me.h"
35 #include "tree-cfg.h"
36 #include "tree-dfa.h"
37 #include "ipa-param-manipulation.h"
38 #include "print-tree.h"
39 #include "gimple-pretty-print.h"
40 #include "builtins.h"
41 #include "tree-ssa.h"
42 #include "tree-inline.h"
43
44
45 /* Actual prefixes of different newly synthesized parameters. Keep in sync
46 with IPA_PARAM_PREFIX_* defines. */
47
48 static const char *ipa_param_prefixes[IPA_PARAM_PREFIX_COUNT]
49 = {"SYNTH",
50 "ISRA",
51 "simd",
52 "mask"};
53
54 /* Names of parameter operations for dumping. Keep in sync with enum ipa_parm_op. */
55
56 static const char *ipa_param_op_names[IPA_PARAM_PREFIX_COUNT]
57 = {"IPA_PARAM_OP_UNDEFINED",
58 "IPA_PARAM_OP_COPY",
59 "IPA_PARAM_OP_NEW",
60 "IPA_PARAM_OP_SPLIT"};
61
62 /* Fill an empty vector ARGS with PARM_DECLs representing formal parameters of
63 FNDECL. The function should not be called during LTO WPA phase except for
64 thunks (or functions with bodies streamed in). */
65
66 void
67 push_function_arg_decls (vec<tree> *args, tree fndecl)
68 {
69 int count;
70 tree parm;
71
72 /* Safety check that we do not attempt to use the function in WPA, except
73 when the function is a thunk and then we have DECL_ARGUMENTS or when we
74 have already explicitly loaded its body. */
75 gcc_assert (!flag_wpa
76 || DECL_ARGUMENTS (fndecl)
77 || gimple_has_body_p (fndecl));
78 count = 0;
79 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
80 count++;
81
82 args->reserve_exact (count);
83 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
84 args->quick_push (parm);
85 }
86
87 /* Fill an empty vector TYPES with trees representing formal parameters of
88 function type FNTYPE. */
89
90 void
91 push_function_arg_types (vec<tree> *types, tree fntype)
92 {
93 int count = 0;
94 tree t;
95
96 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
97 count++;
98
99 types->reserve_exact (count);
100 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
101 types->quick_push (TREE_VALUE (t));
102 }
103
104 /* Dump the adjusted parameter descriptions in the vector ADJ_PARAMS to file F
105 in a human friendly way. */
106
107 void
108 ipa_dump_adjusted_parameters (FILE *f,
109 vec<ipa_adjusted_param, va_gc> *adj_params)
110 {
111 unsigned i, len = vec_safe_length (adj_params);
112 bool first = true;
113
114 if (!len)
115 return;
116
117 fprintf (f, " IPA adjusted parameters: ");
118 for (i = 0; i < len; i++)
119 {
120 struct ipa_adjusted_param *apm;
121 apm = &(*adj_params)[i];
122
123 if (!first)
124 fprintf (f, " ");
125 else
126 first = false;
127
128 fprintf (f, "%i. %s %s", i, ipa_param_op_names[apm->op],
129 apm->prev_clone_adjustment ? "prev_clone_adjustment " : "");
130 switch (apm->op)
131 {
132 case IPA_PARAM_OP_UNDEFINED:
133 break;
134
135 case IPA_PARAM_OP_COPY:
136 fprintf (f, ", base_index: %u", apm->base_index);
137 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
138 break;
139
140 case IPA_PARAM_OP_SPLIT:
141 fprintf (f, ", offset: %u", apm->unit_offset);
142 /* fall-through */
143 case IPA_PARAM_OP_NEW:
144 fprintf (f, ", base_index: %u", apm->base_index);
145 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
146 print_node_brief (f, ", type: ", apm->type, 0);
147 print_node_brief (f, ", alias type: ", apm->alias_ptr_type, 0);
148 fprintf (f, " prefix: %s",
149 ipa_param_prefixes[apm->param_prefix_index]);
150 if (apm->reverse)
151 fprintf (f, ", reverse-sso");
152 break;
153 }
154 fprintf (f, "\n");
155 }
156 }
157
158 /* Fill NEW_TYPES with types of a function after its current OTYPES have been
159 modified as described in ADJ_PARAMS. When USE_PREV_INDICES is true, use
160 prev_clone_index from ADJ_PARAMS to index into OTYPES, otherwise use
161 base_index. */
162
163 static void
164 fill_vector_of_new_param_types (vec<tree> *new_types, vec<tree> *otypes,
165 vec<ipa_adjusted_param, va_gc> *adj_params,
166 bool use_prev_indices)
167 {
168 unsigned adj_len = vec_safe_length (adj_params);
169 new_types->reserve_exact (adj_len);
170 for (unsigned i = 0; i < adj_len ; i++)
171 {
172 ipa_adjusted_param *apm = &(*adj_params)[i];
173 if (apm->op == IPA_PARAM_OP_COPY)
174 {
175 unsigned index
176 = use_prev_indices ? apm->prev_clone_index : apm->base_index;
177 /* The following needs to be handled gracefully because of type
178 mismatches. This happens with LTO but apparently also in Fortran
179 with -fcoarray=lib -O2 -lcaf_single -latomic. */
180 if (index >= otypes->length ())
181 continue;
182 new_types->quick_push ((*otypes)[index]);
183 }
184 else if (apm->op == IPA_PARAM_OP_NEW
185 || apm->op == IPA_PARAM_OP_SPLIT)
186 {
187 tree ntype = apm->type;
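/* Register-type replacements are given a type whose alignment exactly
matches the alignment of their machine mode.  */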
188 if (is_gimple_reg_type (ntype)
189 && TYPE_MODE (ntype) != BLKmode)
190 {
191 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ntype));
192 if (TYPE_ALIGN (ntype) != malign)
193 ntype = build_aligned_type (ntype, malign);
194 }
195 new_types->quick_push (ntype);
196 }
197 else
198 gcc_unreachable ();
199 }
200 }
201
202 /* Build and return a function type just like ORIG_TYPE but with parameter
203 types given in NEW_PARAM_TYPES - which can be NULL if, but only if,
204 ORIG_TYPE itself has NULL TYPE_ARG_TYPES. If METHOD2FUNC is true, make it a
205 FUNCTION_TYPE instead of a METHOD_TYPE. If SKIP_RETURN is true, it returns void. */
206
207 static tree
208 build_adjusted_function_type (tree orig_type, vec<tree> *new_param_types,
209 bool method2func, bool skip_return)
210 {
211 tree new_arg_types = NULL;
212 if (TYPE_ARG_TYPES (orig_type))
213 {
214 gcc_checking_assert (new_param_types);
215 bool last_parm_void = (TREE_VALUE (tree_last (TYPE_ARG_TYPES (orig_type)))
216 == void_type_node);
217 unsigned len = new_param_types->length ();
218 for (unsigned i = 0; i < len; i++)
219 new_arg_types = tree_cons (NULL_TREE, (*new_param_types)[i],
220 new_arg_types);
221
222 tree new_reversed = nreverse (new_arg_types);
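/* If the original argument list was terminated by void_list_node (i.e. the
function does not take a variable number of arguments), terminate the new
list in the same way.  */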
223 if (last_parm_void)
224 {
225 if (new_reversed)
226 TREE_CHAIN (new_arg_types) = void_list_node;
227 else
228 new_reversed = void_list_node;
229 }
230 new_arg_types = new_reversed;
231 }
232
233 /* Use build_distinct_type_copy to preserve as much as possible from original
234 type (debug info, attribute lists etc.). The one exception is
235 METHOD_TYPEs which must have THIS argument and when we are asked to remove
236 it, we need to build new FUNCTION_TYPE instead. */
237 tree new_type = NULL;
238 if (method2func)
239 {
240 tree ret_type;
241 if (skip_return)
242 ret_type = void_type_node;
243 else
244 ret_type = TREE_TYPE (orig_type);
245
246 new_type
247 = build_distinct_type_copy (build_function_type (ret_type,
248 new_arg_types));
249 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
250 }
251 else
252 {
253 new_type = build_distinct_type_copy (orig_type);
254 TYPE_ARG_TYPES (new_type) = new_arg_types;
255 if (skip_return)
256 TREE_TYPE (new_type) = void_type_node;
257 }
258
259 return new_type;
260 }
261
262 /* Return the maximum index in any IPA_PARAM_OP_COPY adjustment or -1 if there
263 is none. */
264
265 int
266 ipa_param_adjustments::get_max_base_index ()
267 {
268 unsigned adj_len = vec_safe_length (m_adj_params);
269 int max_index = -1;
270 for (unsigned i = 0; i < adj_len ; i++)
271 {
272 ipa_adjusted_param *apm = &(*m_adj_params)[i];
273 if (apm->op == IPA_PARAM_OP_COPY
274 && max_index < apm->base_index)
275 max_index = apm->base_index;
276 }
277 return max_index;
278 }
279
280
281 /* Fill SURVIVING_PARAMS with an array of bools where each one says whether a
282 parameter that originally was at that position still survives in the given
283 clone or is removed/replaced. If the final array is smaller than an index
284 of an original parameter, that parameter also did not survive. That a
285 parameter survives does not mean it has the same index as before. */
286
287 void
288 ipa_param_adjustments::get_surviving_params (vec<bool> *surviving_params)
289 {
290 unsigned adj_len = vec_safe_length (m_adj_params);
291 int max_index = get_max_base_index ();
292
293 if (max_index < 0)
294 return;
295 surviving_params->reserve_exact (max_index + 1);
296 surviving_params->quick_grow_cleared (max_index + 1);
297 for (unsigned i = 0; i < adj_len ; i++)
298 {
299 ipa_adjusted_param *apm = &(*m_adj_params)[i];
300 if (apm->op == IPA_PARAM_OP_COPY)
301 (*surviving_params)[apm->base_index] = true;
302 }
303 }
304
305 /* Fill NEW_INDICES with new indices of each surviving parameter or -1 for
306 those which do not survive. Any parameter beyond the length of the vector
307 does not survive. There is currently no support for a parameter to be
308 copied to two distinct new parameters. */
309
310 void
311 ipa_param_adjustments::get_updated_indices (vec<int> *new_indices)
312 {
313 unsigned adj_len = vec_safe_length (m_adj_params);
314 int max_index = get_max_base_index ();
315
316 if (max_index < 0)
317 return;
318 unsigned res_len = max_index + 1;
319 new_indices->reserve_exact (res_len);
320 for (unsigned i = 0; i < res_len ; i++)
321 new_indices->quick_push (-1);
322 for (unsigned i = 0; i < adj_len ; i++)
323 {
324 ipa_adjusted_param *apm = &(*m_adj_params)[i];
325 if (apm->op == IPA_PARAM_OP_COPY)
326 (*new_indices)[apm->base_index] = i;
327 }
328 }
329
330 /* Return the original index for the given new parameter index. Return a
331 negative number if not available. */
332
333 int
334 ipa_param_adjustments::get_original_index (int newidx)
335 {
336 const ipa_adjusted_param *adj = &(*m_adj_params)[newidx];
337 if (adj->op != IPA_PARAM_OP_COPY)
338 return -1;
339 return adj->base_index;
340 }
341
342 /* Return true if the first parameter (assuming there was one) survives the
343 transformation intact and remains the first one. */
344
345 bool
346 ipa_param_adjustments::first_param_intact_p ()
347 {
348 return (!vec_safe_is_empty (m_adj_params)
349 && (*m_adj_params)[0].op == IPA_PARAM_OP_COPY
350 && (*m_adj_params)[0].base_index == 0);
351 }
352
353 /* Return true if we have to change what has formerly been a method into a
354 function. */
355
356 bool
357 ipa_param_adjustments::method2func_p (tree orig_type)
358 {
359 return ((TREE_CODE (orig_type) == METHOD_TYPE) && !first_param_intact_p ());
360 }
361
362 /* Given function type OLD_TYPE, return a new type derived from it after
363 performing all stored modifications. TYPE_ORIGINAL_P should be true when
364 OLD_TYPE refers to the type before any IPA transformations, as opposed to a
365 type that can be an intermediate one in between various IPA
366 transformations. */
367
368 tree
369 ipa_param_adjustments::build_new_function_type (tree old_type,
370 bool type_original_p)
371 {
372 auto_vec<tree,16> new_param_types, *new_param_types_p;
373 if (prototype_p (old_type))
374 {
375 auto_vec<tree, 16> otypes;
376 push_function_arg_types (&otypes, old_type);
377 fill_vector_of_new_param_types (&new_param_types, &otypes, m_adj_params,
378 !type_original_p);
379 new_param_types_p = &new_param_types;
380 }
381 else
382 new_param_types_p = NULL;
383
384 return build_adjusted_function_type (old_type, new_param_types_p,
385 method2func_p (old_type), m_skip_return);
386 }
387
388 /* Build variant of function decl ORIG_DECL which has no return value if
389 M_SKIP_RETURN is true and, if ORIG_DECL's type or parameters are known, has
390 this type adjusted as indicated in M_ADJ_PARAMS. Arguments from
391 DECL_ARGUMENTS list are not processed now, since they are linked by
392 TREE_CHAIN directly and not accessible in LTO during WPA. The caller is
393 responsible for eliminating them when clones are properly materialized. */
394
395 tree
396 ipa_param_adjustments::adjust_decl (tree orig_decl)
397 {
398 tree new_decl = copy_node (orig_decl);
399 tree orig_type = TREE_TYPE (orig_decl);
400 if (prototype_p (orig_type)
401 || (m_skip_return && !VOID_TYPE_P (TREE_TYPE (orig_type))))
402 {
403 tree new_type = build_new_function_type (orig_type, false);
404 TREE_TYPE (new_decl) = new_type;
405 }
406 if (method2func_p (orig_type))
407 DECL_VINDEX (new_decl) = NULL_TREE;
408
409 /* When signature changes, we need to clear builtin info. */
410 if (fndecl_built_in_p (new_decl))
411 set_decl_built_in_function (new_decl, NOT_BUILT_IN, 0);
412
413 DECL_VIRTUAL_P (new_decl) = 0;
414 DECL_LANG_SPECIFIC (new_decl) = NULL;
415
416 /* Drop MALLOC attribute for a void function. */
417 if (m_skip_return)
418 DECL_IS_MALLOC (new_decl) = 0;
419
420 return new_decl;
421 }
422
423 /* Wrapper around get_base_ref_and_offset for cases interesting for IPA-SRA
424 transformations. Return true if EXPR has an interesting form and fill in
425 *BASE_P and *UNIT_OFFSET_P with the appropriate info. */
426
427 static bool
428 isra_get_ref_base_and_offset (tree expr, tree *base_p, unsigned *unit_offset_p)
429 {
430 HOST_WIDE_INT offset, size;
431 bool reverse;
432 tree base
433 = get_ref_base_and_extent_hwi (expr, &offset, &size, &reverse);
434 if (!base || size < 0)
435 return false;
436
437 if ((offset % BITS_PER_UNIT) != 0)
438 return false;
439
440 if (TREE_CODE (base) == MEM_REF)
441 {
442 poly_int64 plmoff = mem_ref_offset (base).force_shwi ();
443 HOST_WIDE_INT moff;
444 bool is_cst = plmoff.is_constant (&moff);
445 if (!is_cst)
446 return false;
447 offset += moff * BITS_PER_UNIT;
448 base = TREE_OPERAND (base, 0);
449 }
450
451 if (offset < 0 || (offset / BITS_PER_UNIT) > UINT_MAX)
452 return false;
453
454 *base_p = base;
455 *unit_offset_p = offset / BITS_PER_UNIT;
456 return true;
457 }
458
459 /* Return true if EXPR describes a transitive split (i.e. one that happened for
460 both the caller and the callee) as recorded in PERFORMED_SPLITS. In that
461 case, store index of the respective record in PERFORMED_SPLITS into
462 *SM_IDX_P and the unit offset from all handled components in EXPR into
463 *UNIT_OFFSET_P. */
464
465 static bool
466 transitive_split_p (vec<ipa_param_performed_split, va_gc> *performed_splits,
467 tree expr, unsigned *sm_idx_p, unsigned *unit_offset_p)
468 {
469 tree base;
470 if (!isra_get_ref_base_and_offset (expr, &base, unit_offset_p))
471 return false;
472
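/* The argument may be an SSA_NAME; in that case the dummy decl we look for
is its underlying variable.  */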
473 if (TREE_CODE (base) == SSA_NAME)
474 {
475 base = SSA_NAME_VAR (base);
476 if (!base)
477 return false;
478 }
479
480 unsigned len = vec_safe_length (performed_splits);
481 for (unsigned i = 0 ; i < len; i++)
482 {
483 ipa_param_performed_split *sm = &(*performed_splits)[i];
484 if (sm->dummy_decl == base)
485 {
486 *sm_idx_p = i;
487 return true;
488 }
489 }
490 return false;
491 }
492
493 /* Structure to hold declarations representing transitive IPA-SRA splits. In
494 essence, if we need to pass UNIT_OFFSET of a parameter which originally has
495 number BASE_INDEX, we should pass down REPL. */
496
497 struct transitive_split_map
498 {
499 tree repl;
500 unsigned base_index;
501 unsigned unit_offset;
502 };
503
504 /* If call STMT contains any parameters representing transitive splits as
505 described by PERFORMED_SPLITS, return the number of extra parameters that
506 were added during clone materialization and fill in INDEX_MAP with adjusted
507 indices of corresponding original parameters and TRANS_MAP with descriptions
508 of all transitive replacements. Otherwise return zero. */
509
510 static unsigned
511 init_transitive_splits (vec<ipa_param_performed_split, va_gc> *performed_splits,
512 gcall *stmt, vec <unsigned> *index_map,
513 auto_vec <transitive_split_map> *trans_map)
514 {
515 unsigned phony_arguments = 0;
516 unsigned stmt_idx = 0, base_index = 0;
517 unsigned nargs = gimple_call_num_args (stmt);
518 while (stmt_idx < nargs)
519 {
520 unsigned unit_offset_delta;
521 tree base_arg = gimple_call_arg (stmt, stmt_idx);
522
523 if (phony_arguments > 0)
524 index_map->safe_push (stmt_idx);
525
526 unsigned sm_idx;
527 stmt_idx++;
528 if (transitive_split_p (performed_splits, base_arg, &sm_idx,
529 &unit_offset_delta))
530 {
531 if (phony_arguments == 0)
532 /* We have optimistically avoided constructing index_map so far but
533 now it is clear it will be necessary, so let's create the easy
534 bit we skipped until now. */
535 for (unsigned k = 0; k < stmt_idx; k++)
536 index_map->safe_push (k);
537
538 tree dummy = (*performed_splits)[sm_idx].dummy_decl;
539 for (unsigned j = sm_idx; j < performed_splits->length (); j++)
540 {
541 ipa_param_performed_split *caller_split
542 = &(*performed_splits)[j];
543 if (caller_split->dummy_decl != dummy)
544 break;
545
546 tree arg = gimple_call_arg (stmt, stmt_idx);
547 struct transitive_split_map tsm;
548 tsm.repl = arg;
549 tsm.base_index = base_index;
550 if (caller_split->unit_offset >= unit_offset_delta)
551 {
552 tsm.unit_offset
553 = (caller_split->unit_offset - unit_offset_delta);
554 trans_map->safe_push (tsm);
555 }
556
557 phony_arguments++;
558 stmt_idx++;
559 }
560 }
561 base_index++;
562 }
563 return phony_arguments;
564 }
565
566 /* Modify actual arguments of a function call in statement STMT, assuming it
567 calls CALLEE_DECL. PERFORMED_SPLITS describes parameter splits that have
568 already been performed in the caller, or is NULL if there are none.
569 Return the new statement that replaced the old one. When invoked, cfun and
570 current_function_decl have to be set to the caller. */
571
572 gcall *
573 ipa_param_adjustments::modify_call (gcall *stmt,
574 vec<ipa_param_performed_split,
575 va_gc> *performed_splits,
576 tree callee_decl, bool update_references)
577 {
578 unsigned len = vec_safe_length (m_adj_params);
579 auto_vec<tree, 16> vargs (len);
580 tree old_decl = gimple_call_fndecl (stmt);
581 unsigned old_nargs = gimple_call_num_args (stmt);
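/* KEPT[i] is set for original arguments that are passed on to the new call;
unkept register arguments may only get debug bind statements below.  */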
582 auto_vec<bool, 16> kept (old_nargs);
583 kept.quick_grow_cleared (old_nargs);
584
585 auto_vec <unsigned, 16> index_map;
586 auto_vec <transitive_split_map> trans_map;
587 bool transitive_remapping = false;
588
589 if (performed_splits)
590 {
591 unsigned removed = init_transitive_splits (performed_splits,
592 stmt, &index_map, &trans_map);
593 if (removed > 0)
594 {
595 transitive_remapping = true;
596 old_nargs -= removed;
597 }
598 }
599
600 cgraph_node *current_node = cgraph_node::get (current_function_decl);
601 if (update_references)
602 current_node->remove_stmt_references (stmt);
603
604 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
605 gimple_stmt_iterator prev_gsi = gsi;
606 gsi_prev (&prev_gsi);
607 for (unsigned i = 0; i < len; i++)
608 {
609 ipa_adjusted_param *apm = &(*m_adj_params)[i];
610 if (apm->op == IPA_PARAM_OP_COPY)
611 {
612 unsigned index = apm->base_index;
613 if (index >= old_nargs)
614 /* Can happen if the original call has argument mismatch,
615 ignore. */
616 continue;
617 if (transitive_remapping)
618 index = index_map[apm->base_index];
619
620 tree arg = gimple_call_arg (stmt, index);
621
622 vargs.quick_push (arg);
623 kept[index] = true;
624 continue;
625 }
626
627 /* At the moment the only user of IPA_PARAM_OP_NEW modifies calls itself.
628 If we ever want to support it during WPA IPA stage, we'll need a
629 mechanism to call into the IPA passes that introduced them. Currently
630 we simply mandate that IPA infrastructure understands all argument
631 modifications. Remember, edge redirection/modification is done only
632 once, not in steps for each pass modifying the callee like clone
633 materialization. */
634 gcc_assert (apm->op == IPA_PARAM_OP_SPLIT);
635
636 /* We have to handle transitive changes differently using the maps we
637 have created before. So look into them first. */
638 tree repl = NULL_TREE;
639 for (unsigned j = 0; j < trans_map.length (); j++)
640 if (trans_map[j].base_index == apm->base_index
641 && trans_map[j].unit_offset == apm->unit_offset)
642 {
643 repl = trans_map[j].repl;
644 break;
645 }
646 if (repl)
647 {
648 vargs.quick_push (repl);
649 continue;
650 }
651
652 unsigned index = apm->base_index;
653 if (index >= old_nargs)
654 /* Can happen if the original call has argument mismatch, ignore. */
655 continue;
656 if (transitive_remapping)
657 index = index_map[apm->base_index];
658 tree base = gimple_call_arg (stmt, index);
659
660 /* We create a new parameter out of the value of the old one, we can
661 do the following kind of transformations:
662
663 - A scalar passed by reference, potentially as a part of a larger
664 aggregate, is converted to a scalar passed by value.
665
666 - A part of an aggregate is passed instead of the whole aggregate. */
667
668 location_t loc = gimple_location (stmt);
669 tree off;
670 bool deref_base = false;
671 unsigned int deref_align = 0;
672 if (TREE_CODE (base) != ADDR_EXPR
673 && is_gimple_reg_type (TREE_TYPE (base)))
674 {
675 /* Detect type mismatches in calls in invalid programs and make a
676 poor attempt to gracefully convert them so that we don't ICE. */
677 if (!POINTER_TYPE_P (TREE_TYPE (base)))
678 base = force_value_to_type (ptr_type_node, base);
679
680 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
681 }
682 else
683 {
684 bool addrof;
685 if (TREE_CODE (base) == ADDR_EXPR)
686 {
687 base = TREE_OPERAND (base, 0);
688 addrof = true;
689 }
690 else
691 addrof = false;
692
693 tree prev_base = base;
694 poly_int64 base_offset;
695 base = get_addr_base_and_unit_offset (base, &base_offset);
696
697 /* Aggregate arguments can have non-invariant addresses. */
698 if (!base)
699 {
700 base = build_fold_addr_expr (prev_base);
701 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
702 }
703 else if (TREE_CODE (base) == MEM_REF)
704 {
705 if (!addrof)
706 {
707 deref_base = true;
708 deref_align = TYPE_ALIGN (TREE_TYPE (base));
709 }
710 off = build_int_cst (apm->alias_ptr_type,
711 base_offset + apm->unit_offset);
712 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
713 off);
714 base = TREE_OPERAND (base, 0);
715 }
716 else
717 {
718 off = build_int_cst (apm->alias_ptr_type,
719 base_offset + apm->unit_offset);
720 base = build_fold_addr_expr (base);
721 }
722 }
723
724 tree type = apm->type;
725 unsigned int align;
726 unsigned HOST_WIDE_INT misalign;
727
728 if (deref_base)
729 {
730 align = deref_align;
731 misalign = 0;
732 }
733 else
734 {
735 get_pointer_alignment_1 (base, &align, &misalign);
736 /* All users must make sure that we can be optimistic when it
737 comes to alignment in this case (by inspecting the final users
738 of these new parameters). */
739 if (TYPE_ALIGN (type) > align)
740 align = TYPE_ALIGN (type);
741 }
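/* Fold the constant offset OFF into the misalignment and, if the result is
not aligned to ALIGN, lower ALIGN to the least significant bit of the
misalignment so that the MEM_REF created below does not claim a bigger
alignment than the access really has.  */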
742 misalign
743 += (offset_int::from (wi::to_wide (off), SIGNED).to_short_addr ()
744 * BITS_PER_UNIT);
745 misalign = misalign & (align - 1);
746 if (misalign != 0)
747 align = least_bit_hwi (misalign);
748 if (align < TYPE_ALIGN (type))
749 type = build_aligned_type (type, align);
750 base = force_gimple_operand_gsi (&gsi, base,
751 true, NULL, true, GSI_SAME_STMT);
752 tree expr = fold_build2_loc (loc, MEM_REF, type, base, off);
753 REF_REVERSE_STORAGE_ORDER (expr) = apm->reverse;
754 /* If expr is not a valid gimple call argument emit
755 a load into a temporary. */
756 if (is_gimple_reg_type (TREE_TYPE (expr)))
757 {
758 gimple *tem = gimple_build_assign (NULL_TREE, expr);
759 if (gimple_in_ssa_p (cfun))
760 {
761 gimple_set_vuse (tem, gimple_vuse (stmt));
762 expr = make_ssa_name (TREE_TYPE (expr), tem);
763 }
764 else
765 expr = create_tmp_reg (TREE_TYPE (expr));
766 gimple_assign_set_lhs (tem, expr);
767 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
768 }
769 vargs.quick_push (expr);
770 }
771
772 if (m_always_copy_start >= 0)
773 for (unsigned i = m_always_copy_start; i < old_nargs; i++)
774 vargs.safe_push (gimple_call_arg (stmt, i));
775
776 /* For optimized away parameters, add on the caller side
777 before the call
778 DEBUG D#X => parm_Y(D)
779 stmts and associate D#X with parm in decl_debug_args_lookup
780 vector to say for debug info that if parameter parm had been passed,
781 it would have value parm_Y(D). */
782 if (MAY_HAVE_DEBUG_BIND_STMTS && old_decl && callee_decl)
783 {
784 vec<tree, va_gc> **debug_args = NULL;
785 unsigned i = 0;
786 for (tree old_parm = DECL_ARGUMENTS (old_decl);
787 old_parm && i < old_nargs && ((int) i) < m_always_copy_start;
788 old_parm = DECL_CHAIN (old_parm), i++)
789 {
790 if (!is_gimple_reg (old_parm) || kept[i])
791 continue;
792 tree origin = DECL_ORIGIN (old_parm);
793 tree arg;
794 if (transitive_remapping)
795 arg = gimple_call_arg (stmt, index_map[i]);
796 else
797 arg = gimple_call_arg (stmt, i);
798
799 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
800 {
801 if (!fold_convertible_p (TREE_TYPE (origin), arg))
802 continue;
803 tree rhs1;
804 if (TREE_CODE (arg) == SSA_NAME
805 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
806 && (rhs1
807 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
808 && useless_type_conversion_p (TREE_TYPE (origin),
809 TREE_TYPE (rhs1)))
810 arg = rhs1;
811 else
812 arg = fold_convert_loc (gimple_location (stmt),
813 TREE_TYPE (origin), arg);
814 }
815 if (debug_args == NULL)
816 debug_args = decl_debug_args_insert (callee_decl);
817 unsigned int ix;
818 tree ddecl = NULL_TREE;
819 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
820 if (ddecl == origin)
821 {
822 ddecl = (**debug_args)[ix + 1];
823 break;
824 }
825 if (ddecl == NULL)
826 {
827 ddecl = make_node (DEBUG_EXPR_DECL);
828 DECL_ARTIFICIAL (ddecl) = 1;
829 TREE_TYPE (ddecl) = TREE_TYPE (origin);
830 SET_DECL_MODE (ddecl, DECL_MODE (origin));
831
832 vec_safe_push (*debug_args, origin);
833 vec_safe_push (*debug_args, ddecl);
834 }
835 gimple *def_temp = gimple_build_debug_bind (ddecl,
836 unshare_expr (arg), stmt);
837 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
838 }
839 }
840
841 if (dump_file && (dump_flags & TDF_DETAILS))
842 {
843 fprintf (dump_file, "replacing stmt:");
844 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
845 }
846
847 gcall *new_stmt = gimple_build_call_vec (callee_decl, vargs);
848
849 if (tree lhs = gimple_call_lhs (stmt))
850 {
851 if (!m_skip_return)
852 gimple_call_set_lhs (new_stmt, lhs);
853 else if (TREE_CODE (lhs) == SSA_NAME)
854 {
855 /* LHS should now be a default-def SSA. Unfortunately default-def
856 SSA_NAMEs need a backing variable (or at least some code examining
857 SSAs assumes it is non-NULL). So we either have to re-use the
858 decl we have at hand or introduce a new one. */
859 tree repl = create_tmp_var (TREE_TYPE (lhs), "removed_return");
860 repl = get_or_create_ssa_default_def (cfun, repl);
861 SSA_NAME_IS_DEFAULT_DEF (repl) = true;
862 imm_use_iterator ui;
863 use_operand_p use_p;
864 gimple *using_stmt;
865 FOR_EACH_IMM_USE_STMT (using_stmt, ui, lhs)
866 {
867 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
868 {
869 SET_USE (use_p, repl);
870 }
871 update_stmt (using_stmt);
872 }
873 }
874 }
875
876 gimple_set_block (new_stmt, gimple_block (stmt));
877 if (gimple_has_location (stmt))
878 gimple_set_location (new_stmt, gimple_location (stmt));
879 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
880 gimple_call_copy_flags (new_stmt, stmt);
881 if (gimple_in_ssa_p (cfun))
882 gimple_move_vops (new_stmt, stmt);
883
884 if (dump_file && (dump_flags & TDF_DETAILS))
885 {
886 fprintf (dump_file, "with stmt:");
887 print_gimple_stmt (dump_file, new_stmt, 0);
888 fprintf (dump_file, "\n");
889 }
890 gsi_replace (&gsi, new_stmt, true);
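/* Record references not only for the new call statement but also for any
statements that have been inserted in front of it above (loads of split
arguments and debug binds).  */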
891 if (update_references)
892 do
893 {
894 current_node->record_stmt_references (gsi_stmt (gsi));
895 gsi_prev (&gsi);
896 }
897 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
898 return new_stmt;
899 }
900
901 /* Dump information contained in the object in textual form to F. */
902
903 void
904 ipa_param_adjustments::dump (FILE *f)
905 {
906 fprintf (f, " m_always_copy_start: %i\n", m_always_copy_start);
907 ipa_dump_adjusted_parameters (f, m_adj_params);
908 if (m_skip_return)
909 fprintf (f, " Will SKIP return.\n");
910 }
911
912 /* Dump information contained in the object in textual form to stderr. */
913
914 void
915 ipa_param_adjustments::debug ()
916 {
917 dump (stderr);
918 }
919
920 /* Register that REPLACEMENT should replace the parameter described in APM,
921 optionally recording DUMMY to mark transitive splits across calls. */
922
923 void
924 ipa_param_body_adjustments::register_replacement (ipa_adjusted_param *apm,
925 tree replacement,
926 tree dummy)
927 {
928 gcc_checking_assert (apm->op == IPA_PARAM_OP_SPLIT
929 || apm->op == IPA_PARAM_OP_NEW);
930 gcc_checking_assert (!apm->prev_clone_adjustment);
931 ipa_param_body_replacement psr;
932 psr.base = m_oparms[apm->prev_clone_index];
933 psr.repl = replacement;
934 psr.dummy = dummy;
935 psr.unit_offset = apm->unit_offset;
936 m_replacements.safe_push (psr);
937 }
938
939 /* Copy or not, as appropriate given m_id and decl context, a pre-existing
940 PARM_DECL T so that it can be included in the parameters of the modified
941 function. */
942
943 tree
944 ipa_param_body_adjustments::carry_over_param (tree t)
945 {
946 tree new_parm;
947 if (m_id)
948 {
949 new_parm = remap_decl (t, m_id);
950 if (TREE_CODE (new_parm) != PARM_DECL)
951 new_parm = m_id->copy_decl (t, m_id);
952 }
953 else if (DECL_CONTEXT (t) != m_fndecl)
954 {
955 new_parm = copy_node (t);
956 DECL_CONTEXT (new_parm) = m_fndecl;
957 }
958 else
959 new_parm = t;
960 return new_parm;
961 }
962
963 /* Common initialization performed by all ipa_param_body_adjustments
964 constructors. OLD_FNDECL is the declaration we take original arguments
965 from (it may be the same as M_FNDECL). VARS, if non-NULL, is a pointer to
966 a chained list of new local variables. TREE_MAP is the IPA-CP produced
967 mapping of trees to constants.
968
969 The function is rather long but it really only initializes all data members
970 of the class, creating new param DECLs and finding their new types. */
971
972 void
973 ipa_param_body_adjustments::common_initialization (tree old_fndecl,
974 tree *vars,
975 vec<ipa_replace_map *,
976 va_gc> *tree_map)
977 {
978 push_function_arg_decls (&m_oparms, old_fndecl);
979 auto_vec<tree,16> otypes;
980 if (TYPE_ARG_TYPES (TREE_TYPE (old_fndecl)) != NULL_TREE)
981 push_function_arg_types (&otypes, TREE_TYPE (old_fndecl));
982 else
983 {
984 auto_vec<tree,16> oparms;
985 push_function_arg_decls (&oparms, old_fndecl);
986 unsigned ocount = oparms.length ();
987 otypes.reserve_exact (ocount);
988 for (unsigned i = 0; i < ocount; i++)
989 otypes.quick_push (TREE_TYPE (oparms[i]));
990 }
991 fill_vector_of_new_param_types (&m_new_types, &otypes, m_adj_params, true);
992
993 auto_vec<bool, 16> kept;
994 kept.reserve_exact (m_oparms.length ());
995 kept.quick_grow_cleared (m_oparms.length ());
996 auto_vec<tree, 16> isra_dummy_decls;
997 isra_dummy_decls.reserve_exact (m_oparms.length ());
998 isra_dummy_decls.quick_grow_cleared (m_oparms.length ());
999
1000 unsigned adj_len = vec_safe_length (m_adj_params);
1001 m_method2func = ((TREE_CODE (TREE_TYPE (m_fndecl)) == METHOD_TYPE)
1002 && (adj_len == 0
1003 || (*m_adj_params)[0].op != IPA_PARAM_OP_COPY
1004 || (*m_adj_params)[0].base_index != 0));
1005
1006 /* The main job of this function is to go over the vector of adjusted
1007 parameters and create declarations or find corresponding old ones and push
1008 them to m_new_decls. For IPA-SRA replacements it also creates
1009 corresponding m_id->dst_node->clone.performed_splits entries. */
1010
1011 m_new_decls.reserve_exact (adj_len);
1012 for (unsigned i = 0; i < adj_len ; i++)
1013 {
1014 ipa_adjusted_param *apm = &(*m_adj_params)[i];
1015 unsigned prev_index = apm->prev_clone_index;
1016 tree new_parm;
1017 if (apm->op == IPA_PARAM_OP_COPY
1018 || apm->prev_clone_adjustment)
1019 {
1020 kept[prev_index] = true;
1021 new_parm = carry_over_param (m_oparms[prev_index]);
1022 m_new_decls.quick_push (new_parm);
1023 }
1024 else if (apm->op == IPA_PARAM_OP_NEW
1025 || apm->op == IPA_PARAM_OP_SPLIT)
1026 {
1027 tree new_type = m_new_types[i];
1028 gcc_checking_assert (new_type);
1029 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
1030 new_type);
1031 const char *prefix = ipa_param_prefixes[apm->param_prefix_index];
1032 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
1033 DECL_ARTIFICIAL (new_parm) = 1;
1034 DECL_ARG_TYPE (new_parm) = new_type;
1035 DECL_CONTEXT (new_parm) = m_fndecl;
1036 TREE_USED (new_parm) = 1;
1037 DECL_IGNORED_P (new_parm) = 1;
1038 layout_decl (new_parm, 0);
1039 m_new_decls.quick_push (new_parm);
1040
1041 if (apm->op == IPA_PARAM_OP_SPLIT)
1042 {
1043 m_split_modifications_p = true;
1044
1045 if (m_id)
1046 {
1047 tree dummy_decl;
1048 if (!isra_dummy_decls[prev_index])
1049 {
1050 dummy_decl = copy_decl_to_var (m_oparms[prev_index],
1051 m_id);
1052 /* Any attempt to remap this dummy in this particular
1053 instance of clone materialization should yield
1054 itself. */
1055 insert_decl_map (m_id, dummy_decl, dummy_decl);
1056
1057 DECL_CHAIN (dummy_decl) = *vars;
1058 *vars = dummy_decl;
1059 isra_dummy_decls[prev_index] = dummy_decl;
1060 }
1061 else
1062 dummy_decl = isra_dummy_decls[prev_index];
1063
1064 register_replacement (apm, new_parm, dummy_decl);
1065 ipa_param_performed_split ps;
1066 ps.dummy_decl = dummy_decl;
1067 ps.unit_offset = apm->unit_offset;
1068 vec_safe_push (m_id->dst_node->clone.performed_splits, ps);
1069 }
1070 else
1071 register_replacement (apm, new_parm);
1072 }
1073 }
1074 else
1075 gcc_unreachable ();
1076 }
1077
1078
1079 /* As part of body modifications, we will also have to replace remaining uses
1080 of removed PARM_DECLs (which do not, however, use the initial value) with
1081 their VAR_DECL copies.
1082
1083 We do this differently with and without m_id. With m_id, we rely on its
1084 mapping and create a replacement straight away. Without it, we have our
1085 own mechanism for which we have to populate m_removed_decls vector. Just
1086 don't mix them, that is why you should not call
1087 replace_removed_params_ssa_names or perform_cfun_body_modifications when
1088 you construct with ID not equal to NULL. */
1089
1090 unsigned op_len = m_oparms.length ();
1091 for (unsigned i = 0; i < op_len; i++)
1092 if (!kept[i])
1093 {
1094 if (m_id)
1095 {
1096 if (!m_id->decl_map->get (m_oparms[i]))
1097 {
1098 /* TODO: Perhaps at least aggregate-type params could re-use
1099 their isra_dummy_decl here? */
1100 tree var = copy_decl_to_var (m_oparms[i], m_id);
1101 insert_decl_map (m_id, m_oparms[i], var);
1102 /* Declare this new variable. */
1103 DECL_CHAIN (var) = *vars;
1104 *vars = var;
1105 }
1106 }
1107 else
1108 {
1109 m_removed_decls.safe_push (m_oparms[i]);
1110 m_removed_map.put (m_oparms[i], m_removed_decls.length () - 1);
1111 }
1112 }
1113
1114 if (!MAY_HAVE_DEBUG_STMTS)
1115 return;
1116
1117 /* Finally, when generating debug info, we fill vector m_reset_debug_decls
1118 with removed parameters declarations. We do this in order to re-map their
1119 debug bind statements and create debug decls for them. */
1120
1121 if (tree_map)
1122 {
1123 /* Do not output debuginfo for parameter declarations as if they vanished
1124 when they were in fact replaced by a constant. */
1125 auto_vec <int, 16> index_mapping;
1126 bool need_remap = false;
1127
1128 if (m_id && m_id->src_node->clone.param_adjustments)
1129 {
1130 ipa_param_adjustments *prev_adjustments
1131 = m_id->src_node->clone.param_adjustments;
1132 prev_adjustments->get_updated_indices (&index_mapping);
1133 need_remap = true;
1134 }
1135
1136 for (unsigned i = 0; i < tree_map->length (); i++)
1137 {
1138 int parm_num = (*tree_map)[i]->parm_num;
1139 gcc_assert (parm_num >= 0);
1140 if (need_remap)
1141 parm_num = index_mapping[parm_num];
1142 kept[parm_num] = true;
1143 }
1144 }
1145
1146 for (unsigned i = 0; i < op_len; i++)
1147 if (!kept[i] && is_gimple_reg (m_oparms[i]))
1148 m_reset_debug_decls.safe_push (m_oparms[i]);
1149 }
1150
1151 /* Constructor of ipa_param_body_adjustments from a simple list of
1152 modifications to parameters listed in ADJ_PARAMS which will prepare ground
1153 for modification of parameters of fndecl. Return value of the function will
1154 not be removed and the object will assume it does not run as a part of
1155 tree_function_versioning. */
1156
1157 ipa_param_body_adjustments
1158 ::ipa_param_body_adjustments (vec<ipa_adjusted_param, va_gc> *adj_params,
1159 tree fndecl)
1160 : m_adj_params (adj_params), m_adjustments (NULL), m_reset_debug_decls (),
1161 m_split_modifications_p (false), m_fndecl (fndecl), m_id (NULL),
1162 m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
1163 m_removed_decls (), m_removed_map (), m_method2func (false)
1164 {
1165 common_initialization (fndecl, NULL, NULL);
1166 }
1167
1168 /* Constructor of ipa_param_body_adjustments from ipa_param_adjustments in
1169 ADJUSTMENTS which will prepare ground for modification of parameters of
1170 fndecl. The object will assume it does not run as a part of
1171 tree-function_versioning. */
1172
1173 ipa_param_body_adjustments
1174 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1175 tree fndecl)
1176 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1177 m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl),
1178 m_id (NULL), m_oparms (), m_new_decls (), m_new_types (),
1179 m_replacements (), m_removed_decls (), m_removed_map (),
1180 m_method2func (false)
1181 {
1182 common_initialization (fndecl, NULL, NULL);
1183 }
1184
1185 /* Constructor of ipa_param_body_adjustments which sets it up as a part of
1186 running tree_function_versioning. Planned modifications to the function are
1187 in ADJUSTMENTS. FNDECL designates the new function clone which is being
1188 modified. OLD_FNDECL is the function of which FNDECL is a clone (and which
1189 at the time of invocation still share DECL_ARGUMENTS). ID is the
1190 copy_body_data structure driving the whole body copying process. VARS is a
1191 pointer to the head of the list of new local variables, TREE_MAP is the map
1192 that drives tree substitution in the cloning process. */
1193
1194 ipa_param_body_adjustments
1195 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1196 tree fndecl, tree old_fndecl,
1197 copy_body_data *id, tree *vars,
1198 vec<ipa_replace_map *, va_gc> *tree_map)
1199 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1200 m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl),
1201 m_id (id), m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
1202 m_removed_decls (), m_removed_map (), m_method2func (false)
1203 {
1204 common_initialization (old_fndecl, vars, tree_map);
1205 }
1206
1207 /* Chain new param decls up and return them. */
1208
1209 tree
1210 ipa_param_body_adjustments::get_new_param_chain ()
1211 {
1212 tree result;
1213 tree *link = &result;
1214
1215 unsigned len = vec_safe_length (m_adj_params);
1216 for (unsigned i = 0; i < len; i++)
1217 {
1218 tree new_decl = m_new_decls[i];
1219 *link = new_decl;
1220 link = &DECL_CHAIN (new_decl);
1221 }
1222 *link = NULL_TREE;
1223 return result;
1224 }
1225
1226 /* Modify the function parameters FNDECL and its type according to the plan in
1227 ADJUSTMENTS. This function needs to be called when the decl has not already
1228 been processed with ipa_param_adjustments::adjust_decl, otherwise just
1229 setting DECL_ARGUMENTS to whatever get_new_param_chain returns is enough. */
1230
1231 void
1232 ipa_param_body_adjustments::modify_formal_parameters ()
1233 {
1234 tree orig_type = TREE_TYPE (m_fndecl);
1235 DECL_ARGUMENTS (m_fndecl) = get_new_param_chain ();
1236
1237 /* When signature changes, we need to clear builtin info. */
1238 if (fndecl_built_in_p (m_fndecl))
1239 set_decl_built_in_function (m_fndecl, NOT_BUILT_IN, 0);
1240
1241 /* At this point, removing return value is only implemented when going
1242 through tree_function_versioning, not when modifying function body
1243 directly. */
1244 gcc_assert (!m_adjustments || !m_adjustments->m_skip_return);
1245 tree new_type = build_adjusted_function_type (orig_type, &m_new_types,
1246 m_method2func, false);
1247
1248 TREE_TYPE (m_fndecl) = new_type;
1249 DECL_VIRTUAL_P (m_fndecl) = 0;
1250 DECL_LANG_SPECIFIC (m_fndecl) = NULL;
1251 if (m_method2func)
1252 DECL_VINDEX (m_fndecl) = NULL_TREE;
1253 }
1254
1255 /* Given BASE and UNIT_OFFSET, find the corresponding record among replacement
1256 structures. */
1257
1258 ipa_param_body_replacement *
1259 ipa_param_body_adjustments::lookup_replacement_1 (tree base,
1260 unsigned unit_offset)
1261 {
1262 unsigned int len = m_replacements.length ();
1263 for (unsigned i = 0; i < len; i++)
1264 {
1265 ipa_param_body_replacement *pbr = &m_replacements[i];
1266
1267 if (pbr->base == base
1268 && (pbr->unit_offset == unit_offset))
1269 return pbr;
1270 }
1271 return NULL;
1272 }
1273
1274 /* Given BASE and UNIT_OFFSET, find the corresponding replacement expression
1275 and return it, assuming it is known it does not hold value by reference or
1276 in reverse storage order. */
1277
1278 tree
1279 ipa_param_body_adjustments::lookup_replacement (tree base, unsigned unit_offset)
1280 {
1281 ipa_param_body_replacement *pbr = lookup_replacement_1 (base, unit_offset);
1282 if (!pbr)
1283 return NULL;
1284 return pbr->repl;
1285 }
1286
1287 /* If T is an SSA_NAME, return NULL if it is not a default def or
1288 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
1289 the base variable is always returned, regardless if it is a default
1290 def. Return T if it is not an SSA_NAME. */
1291
1292 static tree
1293 get_ssa_base_param (tree t, bool ignore_default_def)
1294 {
1295 if (TREE_CODE (t) == SSA_NAME)
1296 {
1297 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
1298 return SSA_NAME_VAR (t);
1299 else
1300 return NULL_TREE;
1301 }
1302 return t;
1303 }
1304
1305 /* Given an expression, return the structure describing how it should be
1306 replaced if it accesses a part of a split parameter or NULL otherwise.
1307
1308 Do not free the result, it will be deallocated when the object is destroyed.
1309
1310 If IGNORE_DEFAULT_DEF is cleared, consider only SSA_NAMEs of PARM_DECLs
1311 which are default definitions, if set, consider all SSA_NAMEs of
1312 PARM_DECLs. */
1313
1314 ipa_param_body_replacement *
1315 ipa_param_body_adjustments::get_expr_replacement (tree expr,
1316 bool ignore_default_def)
1317 {
1318 tree base;
1319 unsigned unit_offset;
1320
1321 if (!isra_get_ref_base_and_offset (expr, &base, &unit_offset))
1322 return NULL;
1323
1324 base = get_ssa_base_param (base, ignore_default_def);
1325 if (!base || TREE_CODE (base) != PARM_DECL)
1326 return NULL;
1327 return lookup_replacement_1 (base, unit_offset);
1328 }
1329
1330 /* Given OLD_DECL, which is a PARM_DECL of a parameter that is being removed
1331 (which includes it being split or replaced), return a new variable that
1332 should be used for any SSA names that will remain in the function that
1333 previously belonged to OLD_DECL. */
1334
1335 tree
1336 ipa_param_body_adjustments::get_replacement_ssa_base (tree old_decl)
1337 {
1338 unsigned *idx = m_removed_map.get (old_decl);
1339 if (!idx)
1340 return NULL;
1341
1342 tree repl;
1343 if (TREE_CODE (m_removed_decls[*idx]) == PARM_DECL)
1344 {
1345 gcc_assert (m_removed_decls[*idx] == old_decl);
1346 repl = copy_var_decl (old_decl, DECL_NAME (old_decl),
1347 TREE_TYPE (old_decl));
1348 m_removed_decls[*idx] = repl;
1349 }
1350 else
1351 repl = m_removed_decls[*idx];
1352 return repl;
1353 }
1354
1355 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
1356 parameter which is to be removed because its value is not used, create a new
1357 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
1358 original with it and return it. If there is no need to re-map, return
1359 NULL. */
1360
1361 tree
1362 ipa_param_body_adjustments::replace_removed_params_ssa_names (tree old_name,
1363 gimple *stmt)
1364 {
1365 gcc_assert (!m_id);
1366 if (TREE_CODE (old_name) != SSA_NAME)
1367 return NULL;
1368
1369 tree decl = SSA_NAME_VAR (old_name);
1370 if (decl == NULL_TREE
1371 || TREE_CODE (decl) != PARM_DECL)
1372 return NULL;
1373
1374 tree repl = get_replacement_ssa_base (decl);
1375 if (!repl)
1376 return NULL;
1377
1378 tree new_name = make_ssa_name (repl, stmt);
1379 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
1380 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
1381
1382 if (dump_file && (dump_flags & TDF_DETAILS))
1383 {
1384 fprintf (dump_file, "replacing an SSA name of a removed param ");
1385 print_generic_expr (dump_file, old_name);
1386 fprintf (dump_file, " with ");
1387 print_generic_expr (dump_file, new_name);
1388 fprintf (dump_file, "\n");
1389 }
1390
1391 replace_uses_by (old_name, new_name);
1392 return new_name;
1393 }
1394
1395 /* If the expression *EXPR_P should be replaced, do so. CONVERT specifies
1396 whether the function should care about type incompatibility of the current
1397 and new expressions. If it is false, the function will leave
1398 incompatibility issues to the caller - note that when the function
1399 encounters a BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR, it will modify
1400 their bases instead of the expressions themselves and then also perform any
1401 necessary conversions. */
1402
1403 bool
1404 ipa_param_body_adjustments::modify_expression (tree *expr_p, bool convert)
1405 {
1406 tree expr = *expr_p;
1407
1408 if (TREE_CODE (expr) == BIT_FIELD_REF
1409 || TREE_CODE (expr) == IMAGPART_EXPR
1410 || TREE_CODE (expr) == REALPART_EXPR)
1411 {
1412 expr_p = &TREE_OPERAND (expr, 0);
1413 expr = *expr_p;
1414 convert = true;
1415 }
1416
1417 ipa_param_body_replacement *pbr = get_expr_replacement (expr, false);
1418 if (!pbr)
1419 return false;
1420
1421 tree repl = pbr->repl;
1422 if (dump_file && (dump_flags & TDF_DETAILS))
1423 {
1424 fprintf (dump_file, "About to replace expr ");
1425 print_generic_expr (dump_file, expr);
1426 fprintf (dump_file, " with ");
1427 print_generic_expr (dump_file, repl);
1428 fprintf (dump_file, "\n");
1429 }
1430
1431 if (convert && !useless_type_conversion_p (TREE_TYPE (expr),
1432 TREE_TYPE (repl)))
1433 {
1434 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expr), repl);
1435 *expr_p = vce;
1436 }
1437 else
1438 *expr_p = repl;
1439 return true;
1440 }
1441
1442 /* If the assignment statement STMT contains any expressions that need to be
1443 replaced with a different one as noted by ADJUSTMENTS, do so. Handle any
1444 potential type incompatibilities. If any conversion statements have to be
1445 pre-pended to STMT, they will be added to EXTRA_STMTS. Return true iff the
1446 statement was modified. */
1447
1448 bool
1449 ipa_param_body_adjustments::modify_assignment (gimple *stmt,
1450 gimple_seq *extra_stmts)
1451 {
1452 tree *lhs_p, *rhs_p;
1453 bool any;
1454
1455 if (!gimple_assign_single_p (stmt))
1456 return false;
1457
1458 rhs_p = gimple_assign_rhs1_ptr (stmt);
1459 lhs_p = gimple_assign_lhs_ptr (stmt);
1460
1461 any = modify_expression (lhs_p, false);
1462 any |= modify_expression (rhs_p, false);
1463 if (any
1464 && !useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
1465 {
1466 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
1467 {
1468 /* V_C_Es of constructors can cause trouble (PR 42714). */
1469 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
1470 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
1471 else
1472 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
1473 NULL);
1474 }
1475 else
1476 {
1477 tree new_rhs = fold_build1_loc (gimple_location (stmt),
1478 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
1479 *rhs_p);
1480 tree tmp = force_gimple_operand (new_rhs, extra_stmts, true,
1481 NULL_TREE);
1482 gimple_assign_set_rhs1 (stmt, tmp);
1483 }
1484 return true;
1485 }
1486
1487 return any;
1488 }
1489
1490 /* Data passed to remap_split_decl_to_dummy through walk_tree. */
1491
1492 struct simple_tree_swap_info
1493 {
1494 /* Change FROM to TO. */
1495 tree from, to;
1496 /* And set DONE to true when doing so. */
1497 bool done;
1498 };
1499
1500 /* Simple remapper to remap a split parameter to the same expression based on a
1501 special dummy decl so that edge redirections can detect transitive splitting
1502 and finish them. */
1503
1504 static tree
1505 remap_split_decl_to_dummy (tree *tp, int *walk_subtrees, void *data)
1506 {
1507 tree t = *tp;
1508
1509 if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
1510 {
1511 struct simple_tree_swap_info *swapinfo
1512 = (struct simple_tree_swap_info *) data;
1513 if (t == swapinfo->from
1514 || (TREE_CODE (t) == SSA_NAME
1515 && SSA_NAME_VAR (t) == swapinfo->from))
1516 {
1517 *tp = swapinfo->to;
1518 swapinfo->done = true;
1519 }
1520 *walk_subtrees = 0;
1521 }
1522 else if (TYPE_P (t))
1523 *walk_subtrees = 0;
1524 else
1525 *walk_subtrees = 1;
1526 return NULL_TREE;
1527 }
1528
1529
1530 /* If the call statement pointed at by STMT_P contains any expressions that
1531 need to be replaced with a different one as noted by ADJUSTMENTS, do so. If the
1532 statement needs to be rebuilt, do so. Return true if any modifications have
1533 been performed.
1534
1535 If the method is invoked as a part of IPA clone materialization and if any
1536 parameter split is transitive, i.e. it applies to the function that is being
1537 modified and also to the callee of the statement, replace the parameter
1538 passed to old callee with an equivalent expression based on a dummy decl
1539 followed by PARM_DECLs representing the actual replacements. The actual
1540 replacements will be then converted into SSA_NAMEs and then
1541 ipa_param_adjustments::modify_call will find the appropriate ones and leave
1542 only those in the call. */
1543
1544 bool
1545 ipa_param_body_adjustments::modify_call_stmt (gcall **stmt_p)
1546 {
1547 gcall *stmt = *stmt_p;
1548 auto_vec <unsigned, 4> pass_through_args;
1549 auto_vec <unsigned, 4> pass_through_pbr_indices;
1550
1551 if (m_split_modifications_p && m_id)
1552 {
1553 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1554 {
1555 tree t = gimple_call_arg (stmt, i);
1556 gcc_assert (TREE_CODE (t) != BIT_FIELD_REF
1557 && TREE_CODE (t) != IMAGPART_EXPR
1558 && TREE_CODE (t) != REALPART_EXPR);
1559
1560 tree base;
1561 unsigned unit_offset;
1562 if (!isra_get_ref_base_and_offset (t, &base, &unit_offset))
1563 continue;
1564
1565 bool by_ref = false;
1566 if (TREE_CODE (base) == SSA_NAME)
1567 {
1568 if (!SSA_NAME_IS_DEFAULT_DEF (base))
1569 continue;
1570 base = SSA_NAME_VAR (base);
1571 gcc_checking_assert (base);
1572 by_ref = true;
1573 }
1574 if (TREE_CODE (base) != PARM_DECL)
1575 continue;
1576
1577 bool base_among_replacements = false;
1578 unsigned j, repl_list_len = m_replacements.length ();
1579 for (j = 0; j < repl_list_len; j++)
1580 {
1581 ipa_param_body_replacement *pbr = &m_replacements[j];
1582 if (pbr->base == base)
1583 {
1584 base_among_replacements = true;
1585 break;
1586 }
1587 }
1588 if (!base_among_replacements)
1589 continue;
1590
1591 /* We still have to distinguish between an end-use that we have to
1592 transform now and a pass-through, which happens in the following
1593 two cases. */
1594
1595 /* TODO: After we adjust ptr_parm_has_nonarg_uses to also consider
1596 &MEM_REF[ssa_name + offset], we will also have to detect that case
1597 here. */
1598
1599 if (TREE_CODE (t) == SSA_NAME
1600 && SSA_NAME_IS_DEFAULT_DEF (t)
1601 && SSA_NAME_VAR (t)
1602 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL)
1603 {
1604 /* This must be a by_reference pass-through. */
1605 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
1606 pass_through_args.safe_push (i);
1607 pass_through_pbr_indices.safe_push (j);
1608 }
1609 else if (!by_ref && AGGREGATE_TYPE_P (TREE_TYPE (t)))
1610 {
1611 /* Currently IPA-SRA guarantees the aggregate access type
1612 exactly matches in this case. So if it does not match, it is
1613 a pass-through argument that will be sorted out at edge
1614 redirection time. */
1615 ipa_param_body_replacement *pbr
1616 = lookup_replacement_1 (base, unit_offset);
1617
1618 if (!pbr
1619 || (TYPE_MAIN_VARIANT (TREE_TYPE (t))
1620 != TYPE_MAIN_VARIANT (TREE_TYPE (pbr->repl))))
1621 {
1622 pass_through_args.safe_push (i);
1623 pass_through_pbr_indices.safe_push (j);
1624 }
1625 }
1626 }
1627 }
1628
1629 unsigned nargs = gimple_call_num_args (stmt);
1630 if (!pass_through_args.is_empty ())
1631 {
1632 auto_vec<tree, 16> vargs;
1633 unsigned pt_idx = 0;
1634 for (unsigned i = 0; i < nargs; i++)
1635 {
1636 if (pt_idx < pass_through_args.length ()
1637 && i == pass_through_args[pt_idx])
1638 {
1639 unsigned j = pass_through_pbr_indices[pt_idx];
1640 pt_idx++;
1641 tree base = m_replacements[j].base;
1642
1643 /* The base will get mapped to the special transitive-isra marker
1644 dummy decl. */
1645 struct simple_tree_swap_info swapinfo;
1646 swapinfo.from = base;
1647 swapinfo.to = m_replacements[j].dummy;
1648 swapinfo.done = false;
1649 tree arg = gimple_call_arg (stmt, i);
1650 walk_tree (&arg, remap_split_decl_to_dummy, &swapinfo, NULL);
1651 gcc_assert (swapinfo.done);
1652 vargs.safe_push (arg);
1653 /* Now let's push all replacements pertaining to this parameter
1654 so that all gimple register ones get correct SSA_NAMES. Edge
1655 redirection will weed out the dummy argument as well as all
1656 unused replacements later. */
1657 unsigned int repl_list_len = m_replacements.length ();
1658 for (; j < repl_list_len; j++)
1659 {
1660 if (m_replacements[j].base != base)
1661 break;
1662 vargs.safe_push (m_replacements[j].repl);
1663 }
1664 }
1665 else
1666 {
1667 tree t = gimple_call_arg (stmt, i);
1668 modify_expression (&t, true);
1669 vargs.safe_push (t);
1670 }
1671 }
1672 gcall *new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
1673 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
1674 gimple_call_copy_flags (new_stmt, stmt);
1675 if (tree lhs = gimple_call_lhs (stmt))
1676 {
1677 modify_expression (&lhs, false);
1678 gimple_call_set_lhs (new_stmt, lhs);
1679 }
1680 *stmt_p = new_stmt;
1681 return true;
1682 }
1683
1684 /* Otherwise, no need to rebuild the statement, let's just modify arguments
1685 and the LHS if/as appropriate. */
1686 bool modified = false;
1687 for (unsigned i = 0; i < nargs; i++)
1688 {
1689 tree *t = gimple_call_arg_ptr (stmt, i);
1690 modified |= modify_expression (t, true);
1691 }
1692
1693 if (gimple_call_lhs (stmt))
1694 {
1695 tree *t = gimple_call_lhs_ptr (stmt);
1696 modified |= modify_expression (t, false);
1697 }
1698
1699 return modified;
1700 }
1701
1702 /* If the statement STMT contains any expressions that need to be replaced with
1703 a different one as noted by ADJUSTMENTS, do so. Handle any potential type
1704 incompatibilities. If any conversion statements have to be pre-pended to
1705 STMT, they will be added to EXTRA_STMTS. Return true iff the statement was
1706 modified. */
1707
1708 bool
1709 ipa_param_body_adjustments::modify_gimple_stmt (gimple **stmt,
1710 gimple_seq *extra_stmts)
1711 {
1712 bool modified = false;
1713 tree *t;
1714
1715 switch (gimple_code (*stmt))
1716 {
1717 case GIMPLE_RETURN:
1718 t = gimple_return_retval_ptr (as_a <greturn *> (*stmt));
1719 if (m_adjustments && m_adjustments->m_skip_return)
1720 *t = NULL_TREE;
1721 else if (*t != NULL_TREE)
1722 modified |= modify_expression (t, true);
1723 break;
1724
1725 case GIMPLE_ASSIGN:
1726 modified |= modify_assignment (*stmt, extra_stmts);
1727 break;
1728
1729 case GIMPLE_CALL:
1730 modified |= modify_call_stmt ((gcall **) stmt);
1731 break;
1732
1733 case GIMPLE_ASM:
1734 {
1735 gasm *asm_stmt = as_a <gasm *> (*stmt);
1736 for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1737 {
1738 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1739 modified |= modify_expression (t, true);
1740 }
1741 for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1742 {
1743 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1744 modified |= modify_expression (t, false);
1745 }
1746 }
1747 break;
1748
1749 default:
1750 break;
1751 }
1752 return modified;
1753 }
1754
1755
1756 /* Traverse body of the current function and perform the requested adjustments
1757 on its statements. Return true iff the CFG has been changed. */
1758
1759 bool
1760 ipa_param_body_adjustments::modify_cfun_body ()
1761 {
1762 bool cfg_changed = false;
1763 basic_block bb;
1764
1765 FOR_EACH_BB_FN (bb, cfun)
1766 {
1767 gimple_stmt_iterator gsi;
1768
1769 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1770 {
1771 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
1772 tree new_lhs, old_lhs = gimple_phi_result (phi);
1773 new_lhs = replace_removed_params_ssa_names (old_lhs, phi);
1774 if (new_lhs)
1775 {
1776 gimple_phi_set_result (phi, new_lhs);
1777 release_ssa_name (old_lhs);
1778 }
1779 }
1780
1781 gsi = gsi_start_bb (bb);
1782 while (!gsi_end_p (gsi))
1783 {
1784 gimple *stmt = gsi_stmt (gsi);
1785 gimple *stmt_copy = stmt;
1786 gimple_seq extra_stmts = NULL;
1787 bool modified = modify_gimple_stmt (&stmt, &extra_stmts);
1788 if (stmt != stmt_copy)
1789 {
1790 gcc_checking_assert (modified);
1791 gsi_replace (&gsi, stmt, false);
1792 }
1793 if (!gimple_seq_empty_p (extra_stmts))
1794 gsi_insert_seq_before (&gsi, extra_stmts, GSI_SAME_STMT);
1795
1796 def_operand_p defp;
1797 ssa_op_iter iter;
1798 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
1799 {
1800 tree old_def = DEF_FROM_PTR (defp);
1801 if (tree new_def = replace_removed_params_ssa_names (old_def,
1802 stmt))
1803 {
1804 SET_DEF (defp, new_def);
1805 release_ssa_name (old_def);
1806 modified = true;
1807 }
1808 }
1809
1810 if (modified)
1811 {
1812 update_stmt (stmt);
1813 if (maybe_clean_eh_stmt (stmt)
1814 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
1815 cfg_changed = true;
1816 }
1817 gsi_next (&gsi);
1818 }
1819 }
1820
1821 return cfg_changed;
1822 }
1823
1824 /* Call gimple_debug_bind_reset_value on all debug statements describing
1825 gimple register parameters that are being removed or replaced. */
1826
1827 void
1828 ipa_param_body_adjustments::reset_debug_stmts ()
1829 {
1830 int i, len;
1831 gimple_stmt_iterator *gsip = NULL, gsi;
1832
1833 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
1834 {
1835 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1836 gsip = &gsi;
1837 }
1838 len = m_reset_debug_decls.length ();
1839 for (i = 0; i < len; i++)
1840 {
1841 imm_use_iterator ui;
1842 gimple *stmt;
1843 gdebug *def_temp;
1844 tree name, vexpr, copy = NULL_TREE;
1845 use_operand_p use_p;
1846 tree decl = m_reset_debug_decls[i];
1847
1848 gcc_checking_assert (is_gimple_reg (decl));
1849 name = ssa_default_def (cfun, decl);
1850 vexpr = NULL;
1851 if (name)
1852 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
1853 {
1854 if (gimple_clobber_p (stmt))
1855 {
1856 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
1857 unlink_stmt_vdef (stmt);
1858 gsi_remove (&cgsi, true);
1859 release_defs (stmt);
1860 continue;
1861 }
1862 /* All other users must have been removed by function body
1863 modification. */
1864 gcc_assert (is_gimple_debug (stmt));
1865 if (vexpr == NULL && gsip != NULL)
1866 {
1867 vexpr = make_node (DEBUG_EXPR_DECL);
1868 def_temp = gimple_build_debug_source_bind (vexpr, decl, NULL);
1869 DECL_ARTIFICIAL (vexpr) = 1;
1870 TREE_TYPE (vexpr) = TREE_TYPE (name);
1871 SET_DECL_MODE (vexpr, DECL_MODE (decl));
1872 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
1873 }
1874 if (vexpr)
1875 {
1876 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
1877 SET_USE (use_p, vexpr);
1878 }
1879 else
1880 gimple_debug_bind_reset_value (stmt);
1881 update_stmt (stmt);
1882 }
1883 /* Create a VAR_DECL for debug info purposes. */
1884 if (!DECL_IGNORED_P (decl))
1885 {
1886 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1887 VAR_DECL, DECL_NAME (decl),
1888 TREE_TYPE (decl));
1889 if (DECL_PT_UID_SET_P (decl))
1890 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
1891 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
1892 TREE_READONLY (copy) = TREE_READONLY (decl);
1893 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
1894 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
1895 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
1896 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
1897 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
1898 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
1899 SET_DECL_RTL (copy, 0);
1900 TREE_USED (copy) = 1;
1901 DECL_CONTEXT (copy) = current_function_decl;
1902 add_local_decl (cfun, copy);
1903 DECL_CHAIN (copy)
1904 = BLOCK_VARS (DECL_INITIAL (current_function_decl));
1905 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
1906 }
1907 if (gsip != NULL && copy && target_for_debug_bind (decl))
1908 {
1909 gcc_assert (TREE_CODE (decl) == PARM_DECL);
1910 if (vexpr)
1911 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
1912 else
1913 def_temp = gimple_build_debug_source_bind (copy, decl,
1914 NULL);
1915 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
1916 }
1917 }
1918 }
1919
1920 /* Perform all necessary body changes to change signature, body and debug info
1921 of fun according to adjustments passed at construction. Return true if CFG
1922 was changed in any way. The main entry point for modification of standalone
1923 functions that is not part of IPA clone materialization. */
1924
1925 bool
1926 ipa_param_body_adjustments::perform_cfun_body_modifications ()
1927 {
1928 bool cfg_changed;
1929 modify_formal_parameters ();
1930 cfg_changed = modify_cfun_body ();
1931 reset_debug_stmts ();
1932
1933 return cfg_changed;
1934 }
1935