1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "predict.h"
25 #include "vec.h"
26 #include "hashtab.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "tm.h"
30 #include "hard-reg-set.h"
31 #include "input.h"
32 #include "function.h"
33 #include "dominance.h"
34 #include "cfg.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "tree-eh.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "expr.h"
44 #include "stor-layout.h"
45 #include "print-tree.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "langhooks.h"
51 #include "target.h"
52 #include "hash-map.h"
53 #include "plugin-api.h"
54 #include "ipa-ref.h"
55 #include "cgraph.h"
56 #include "alloc-pool.h"
57 #include "ipa-prop.h"
58 #include "bitmap.h"
59 #include "gimple-ssa.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "tree-into-ssa.h"
64 #include "tree-dfa.h"
65 #include "tree-pass.h"
66 #include "tree-inline.h"
67 #include "ipa-inline.h"
68 #include "flags.h"
69 #include "diagnostic.h"
70 #include "gimple-pretty-print.h"
71 #include "lto-streamer.h"
72 #include "data-streamer.h"
73 #include "tree-streamer.h"
74 #include "params.h"
75 #include "ipa-utils.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "dbgcnt.h"
79 #include "domwalk.h"
80 #include "builtins.h"
81 #include "calls.h"
82
83 /* Intermediate information that we get from alias analysis about a particular
84 parameter in a particular basic_block. When a parameter or the memory it
85 references is marked modified, we use that information in all dominated
86 blocks without consulting the alias analysis oracle. */
87
88 struct param_aa_status
89 {
90 /* Set when this structure contains meaningful information. If not, the
91 structure describing a dominating BB should be used instead. */
92 bool valid;
93
94 /* Whether we have seen something which might have modified the data in
95 question. PARM is for the parameter itself, REF is for data it points to
96 but using the alias type of individual accesses and PT is the same thing
97 but for computing aggregate pass-through functions using a very inclusive
98 ao_ref. */
99 bool parm_modified, ref_modified, pt_modified;
100 };
101
102 /* Information related to a given BB that is used only when looking at function
103 body. */
104
105 struct ipa_bb_info
106 {
107 /* Call graph edges going out of this BB. */
108 vec<cgraph_edge *> cg_edges;
109 /* Alias analysis statuses of each formal parameter at this bb. */
110 vec<param_aa_status> param_aa_statuses;
111 };
112
113 /* Structure with global information that is only used when looking at function
114 body. */
115
116 struct func_body_info
117 {
118 /* The node that is being analyzed. */
119 cgraph_node *node;
120
121 /* Its info. */
122 struct ipa_node_params *info;
123
124 /* Information about individual BBs. */
125 vec<ipa_bb_info> bb_infos;
126
127 /* Number of parameters. */
128 int param_count;
129
130 /* Number of statements already walked when analyzing this function. */
131 unsigned int aa_walked;
132 };
133
134 /* Vector where the parameter infos are actually stored. */
135 vec<ipa_node_params> ipa_node_params_vector;
136 /* Vector of known aggregate values in cloned nodes. */
137 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
138 /* Vector where the edge argument infos are actually stored. */
139 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
140
141 /* Holders of ipa cgraph hooks: */
142 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
143 static struct cgraph_node_hook_list *node_removal_hook_holder;
144 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
145 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
146 static struct cgraph_node_hook_list *function_insertion_hook_holder;
147
148 /* Description of a reference to an IPA constant. */
149 struct ipa_cst_ref_desc
150 {
151 /* Edge that corresponds to the statement which took the reference. */
152 struct cgraph_edge *cs;
153 /* Linked list of duplicates created when call graph edges are cloned. */
154 struct ipa_cst_ref_desc *next_duplicate;
155 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
156 is out of control. */
157 int refcount;
158 };
159
160 /* Allocation pool for reference descriptions. */
161
162 static alloc_pool ipa_refdesc_pool;
163
164 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
165 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
166
167 static bool
168 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
169 {
170 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
171
172 if (!fs_opts)
173 return false;
174 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
175 }
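
/* For example (an illustrative case, not part of the original sources), a
   function declared as

     __attribute__ ((optimize ("O0"))) void f (void);

   has DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize set to 0, so the
   predicate above returns true and the function is skipped by IPA-CP.  */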
176
177 /* Return index of the formal parameter whose decl is PTREE among the given
178 DESCRIPTORS, or -1 if it is not among them. */
179
180 static int
181 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
182 {
183 int i, count;
184
185 count = descriptors.length ();
186 for (i = 0; i < count; i++)
187 if (descriptors[i].decl == ptree)
188 return i;
189
190 return -1;
191 }
192
193 /* Return index of the formal whose tree is PTREE in function which corresponds
194 to INFO. */
195
196 int
197 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
198 {
199 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
200 }
201
202 /* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
203 NODE. */
204
205 static void
206 ipa_populate_param_decls (struct cgraph_node *node,
207 vec<ipa_param_descriptor> &descriptors)
208 {
209 tree fndecl;
210 tree fnargs;
211 tree parm;
212 int param_num;
213
214 fndecl = node->decl;
215 gcc_assert (gimple_has_body_p (fndecl));
216 fnargs = DECL_ARGUMENTS (fndecl);
217 param_num = 0;
218 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
219 {
220 descriptors[param_num].decl = parm;
221 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
222 true);
223 param_num++;
224 }
225 }
226
227 /* Return how many formal parameters FNDECL has. */
228
229 int
230 count_formal_params (tree fndecl)
231 {
232 tree parm;
233 int count = 0;
234 gcc_assert (gimple_has_body_p (fndecl));
235
236 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
237 count++;
238
239 return count;
240 }
241
242 /* Dump information about the Ith formal parameter of the function
243 corresponding to INFO to FILE: its index and, if available, its
244 declaration. */
245
246 void
247 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
248 {
249 fprintf (file, "param #%i", i);
250 if (info->descriptors[i].decl)
251 {
252 fprintf (file, " ");
253 print_generic_expr (file, info->descriptors[i].decl, 0);
254 }
255 }
256
257 /* Initialize the ipa_node_params structure associated with NODE
258 to hold PARAM_COUNT parameters. */
259
260 void
261 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
262 {
263 struct ipa_node_params *info = IPA_NODE_REF (node);
264
265 if (!info->descriptors.exists () && param_count)
266 info->descriptors.safe_grow_cleared (param_count);
267 }
268
269 /* Initialize the ipa_node_params structure associated with NODE by counting
270 the function parameters, creating the descriptors and populating their
271 param_decls. */
272
273 void
274 ipa_initialize_node_params (struct cgraph_node *node)
275 {
276 struct ipa_node_params *info = IPA_NODE_REF (node);
277
278 if (!info->descriptors.exists ())
279 {
280 ipa_alloc_node_params (node, count_formal_params (node->decl));
281 ipa_populate_param_decls (node, info->descriptors);
282 }
283 }
284
285 /* Print the jump functions associated with call graph edge CS to file F. */
286
287 static void
288 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
289 {
290 int i, count;
291
292 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
293 for (i = 0; i < count; i++)
294 {
295 struct ipa_jump_func *jump_func;
296 enum jump_func_type type;
297
298 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
299 type = jump_func->type;
300
301 fprintf (f, " param %d: ", i);
302 if (type == IPA_JF_UNKNOWN)
303 fprintf (f, "UNKNOWN\n");
304 else if (type == IPA_JF_CONST)
305 {
306 tree val = jump_func->value.constant.value;
307 fprintf (f, "CONST: ");
308 print_generic_expr (f, val, 0);
309 if (TREE_CODE (val) == ADDR_EXPR
310 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
311 {
312 fprintf (f, " -> ");
313 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
314 0);
315 }
316 fprintf (f, "\n");
317 }
318 else if (type == IPA_JF_PASS_THROUGH)
319 {
320 fprintf (f, "PASS THROUGH: ");
321 fprintf (f, "%d, op %s",
322 jump_func->value.pass_through.formal_id,
323 get_tree_code_name(jump_func->value.pass_through.operation));
324 if (jump_func->value.pass_through.operation != NOP_EXPR)
325 {
326 fprintf (f, " ");
327 print_generic_expr (f,
328 jump_func->value.pass_through.operand, 0);
329 }
330 if (jump_func->value.pass_through.agg_preserved)
331 fprintf (f, ", agg_preserved");
332 fprintf (f, "\n");
333 }
334 else if (type == IPA_JF_ANCESTOR)
335 {
336 fprintf (f, "ANCESTOR: ");
337 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC,
338 jump_func->value.ancestor.formal_id,
339 jump_func->value.ancestor.offset);
340 if (jump_func->value.ancestor.agg_preserved)
341 fprintf (f, ", agg_preserved");
342 fprintf (f, "\n");
343 }
344
345 if (jump_func->agg.items)
346 {
347 struct ipa_agg_jf_item *item;
348 int j;
349
350 fprintf (f, " Aggregate passed by %s:\n",
351 jump_func->agg.by_ref ? "reference" : "value");
352 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
353 {
354 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
355 item->offset);
356 if (TYPE_P (item->value))
357 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
358 tree_to_uhwi (TYPE_SIZE (item->value)));
359 else
360 {
361 fprintf (f, "cst: ");
362 print_generic_expr (f, item->value, 0);
363 }
364 fprintf (f, "\n");
365 }
366 }
367
368 struct ipa_polymorphic_call_context *ctx
369 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
370 if (ctx && !ctx->useless_p ())
371 {
372 fprintf (f, " Context: ");
373 ctx->dump (f);
374 }
375 }
376 }
377
378
379 /* Print the jump functions of all arguments on all call graph edges going from
380 NODE to file F. */
381
382 void
383 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
384 {
385 struct cgraph_edge *cs;
386
387 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
388 node->order);
389 for (cs = node->callees; cs; cs = cs->next_callee)
390 {
391 if (!ipa_edge_args_info_available_for_edge_p (cs))
392 continue;
393
394 fprintf (f, " callsite %s/%i -> %s/%i : \n",
395 xstrdup (node->name ()), node->order,
396 xstrdup (cs->callee->name ()),
397 cs->callee->order);
398 ipa_print_node_jump_functions_for_edge (f, cs);
399 }
400
401 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
402 {
403 struct cgraph_indirect_call_info *ii;
404 if (!ipa_edge_args_info_available_for_edge_p (cs))
405 continue;
406
407 ii = cs->indirect_info;
408 if (ii->agg_contents)
409 fprintf (f, " indirect %s callsite, calling param %i, "
410 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
411 ii->member_ptr ? "member ptr" : "aggregate",
412 ii->param_index, ii->offset,
413 ii->by_ref ? "by reference" : "by value");
414 else
415 fprintf (f, " indirect %s callsite, calling param %i, "
416 "offset " HOST_WIDE_INT_PRINT_DEC,
417 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
418 ii->offset);
419
420 if (cs->call_stmt)
421 {
422 fprintf (f, ", for stmt ");
423 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
424 }
425 else
426 fprintf (f, "\n");
427 if (ii->polymorphic)
428 ii->context.dump (f);
429 ipa_print_node_jump_functions_for_edge (f, cs);
430 }
431 }
432
433 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
434
435 void
436 ipa_print_all_jump_functions (FILE *f)
437 {
438 struct cgraph_node *node;
439
440 fprintf (f, "\nJump functions:\n");
441 FOR_EACH_FUNCTION (node)
442 {
443 ipa_print_node_jump_functions (f, node);
444 }
445 }
446
447 /* Set DST to be a copy of the constant jump function SRC (to be used by the
448 jump function combination code). The two functions will share their rdesc. */
449
450 static void
451 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
452 struct ipa_jump_func *src)
453
454 {
455 gcc_checking_assert (src->type == IPA_JF_CONST);
456 dst->type = IPA_JF_CONST;
457 dst->value.constant = src->value.constant;
458 }
459
460 /* Set JFUNC to be a constant jump function. */
461
462 static void
463 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
464 struct cgraph_edge *cs)
465 {
466 constant = unshare_expr (constant);
467 if (constant && EXPR_P (constant))
468 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
469 jfunc->type = IPA_JF_CONST;
470 jfunc->value.constant.value = unshare_expr_without_location (constant);
471
472 if (TREE_CODE (constant) == ADDR_EXPR
473 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
474 {
475 struct ipa_cst_ref_desc *rdesc;
476 if (!ipa_refdesc_pool)
477 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
478 sizeof (struct ipa_cst_ref_desc), 32);
479
480 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
481 rdesc->cs = cs;
482 rdesc->next_duplicate = NULL;
483 rdesc->refcount = 1;
484 jfunc->value.constant.rdesc = rdesc;
485 }
486 else
487 jfunc->value.constant.rdesc = NULL;
488 }
489
490 /* Set JFUNC to be a simple pass-through jump function. */
491 static void
492 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
493 bool agg_preserved)
494 {
495 jfunc->type = IPA_JF_PASS_THROUGH;
496 jfunc->value.pass_through.operand = NULL_TREE;
497 jfunc->value.pass_through.formal_id = formal_id;
498 jfunc->value.pass_through.operation = NOP_EXPR;
499 jfunc->value.pass_through.agg_preserved = agg_preserved;
500 }
501
502 /* Set JFUNC to be an arithmetic pass-through jump function. */
503
504 static void
505 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
506 tree operand, enum tree_code operation)
507 {
508 jfunc->type = IPA_JF_PASS_THROUGH;
509 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
510 jfunc->value.pass_through.formal_id = formal_id;
511 jfunc->value.pass_through.operation = operation;
512 jfunc->value.pass_through.agg_preserved = false;
513 }
514
515 /* Set JFUNC to be an ancestor jump function. */
516
517 static void
518 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
519 int formal_id, bool agg_preserved)
520 {
521 jfunc->type = IPA_JF_ANCESTOR;
522 jfunc->value.ancestor.formal_id = formal_id;
523 jfunc->value.ancestor.offset = offset;
524 jfunc->value.ancestor.agg_preserved = agg_preserved;
525 }
526
527 /* Get IPA BB information about the given BB. FBI is the context of analysis
528 of this function body. */
529
530 static struct ipa_bb_info *
531 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
532 {
533 gcc_checking_assert (fbi);
534 return &fbi->bb_infos[bb->index];
535 }
536
537 /* Structure to be passed in between detect_type_change and
538 check_stmt_for_type_change. */
539
540 struct prop_type_change_info
541 {
542 /* Offset into the object where there is the virtual method pointer we are
543 looking for. */
544 HOST_WIDE_INT offset;
545 /* The declaration or SSA_NAME pointer of the base that we are checking for
546 type change. */
547 tree object;
548 /* Set to true if dynamic type change has been detected. */
549 bool type_maybe_changed;
550 };
551
552 /* Return true if STMT can modify a virtual method table pointer.
553
554 This function makes special assumptions about both constructors and
555 destructors which are all the functions that are allowed to alter the VMT
556 pointers. It assumes that destructors begin with assignment into all VMT
557 pointers and that constructors essentially look in the following way:
558
559 1) The very first thing they do is that they call constructors of ancestor
560 sub-objects that have them.
561
562 2) Then VMT pointers of this and all its ancestors are set to new values
563 corresponding to the type associated with the constructor.
564
565 3) Only afterwards, other stuff such as constructors of member sub-objects
566 and the code written by the user is run. Only this may include calling
567 virtual functions, directly or indirectly.
568
569 There is no way to call a constructor of an ancestor sub-object in any
570 other way.
571
572 This means that we do not have to care whether constructors get the correct
573 type information because they will always change it (in fact, if we define
574 the type to be given by the VMT pointer, it is undefined).
575
576 The most important fact to derive from the above is that if, for some
577 statement in section 3, we try to detect whether the dynamic type has
578 changed, we can safely ignore all calls as we examine the function body
579 backwards until we reach statements in section 2 because these calls cannot
580 be ancestor constructors or destructors (if the input is not bogus) and so
581 do not change the dynamic type (this holds true only for automatically
582 allocated objects but at the moment we devirtualize only these). We then
583 must detect that statements in section 2 change the dynamic type and can try
584 to derive the new type. That is enough and we can stop, we will never see
585 the calls into constructors of sub-objects in this code. Therefore we can
586 safely ignore all call statements that we traverse.
587 */
588
589 static bool
590 stmt_may_be_vtbl_ptr_store (gimple stmt)
591 {
592 if (is_gimple_call (stmt))
593 return false;
594 if (gimple_clobber_p (stmt))
595 return false;
596 else if (is_gimple_assign (stmt))
597 {
598 tree lhs = gimple_assign_lhs (stmt);
599
600 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
601 {
602 if (flag_strict_aliasing
603 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
604 return false;
605
606 if (TREE_CODE (lhs) == COMPONENT_REF
607 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
608 return false;
609 /* In the future we might want to use get_base_ref_and_offset to find
610 if there is a field corresponding to the offset and if so, proceed
611 almost as if it were a component ref. */
612 }
613 }
614 return true;
615 }
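
/* For illustration (a hypothetical GIMPLE fragment), a store such as

     this_2(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   is considered a potential vtable pointer store by the predicate above
   (the LHS is a COMPONENT_REF of a DECL_VIRTUAL_P field), whereas calls
   and clobbers are not.  */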
616
617 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
618 to check whether a particular statement may modify the virtual table
619 pointer. It stores its result into DATA, which points to a
620 prop_type_change_info structure. */
621
622 static bool
623 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
624 {
625 gimple stmt = SSA_NAME_DEF_STMT (vdef);
626 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
627
628 if (stmt_may_be_vtbl_ptr_store (stmt))
629 {
630 tci->type_maybe_changed = true;
631 return true;
632 }
633 else
634 return false;
635 }
636
637 /* See if ARG is a PARM_DECL describing an instance passed by pointer
638 or reference in FUNCTION. Return true if the dynamic type may change
639 between the beginning of the function and the point where CALL is invoked.
640 
641 Generally functions are not allowed to change the type of such instances,
642 but they may call destructors. We assume that methods cannot destroy the
643 THIS pointer. Also, as special cases, constructors and destructors may
644 change the type of the THIS pointer. */
645
646 static bool
647 param_type_may_change_p (tree function, tree arg, gimple call)
648 {
649 /* Pure functions cannot do any changes on the dynamic type;
650 that requires writing to memory. */
651 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
652 return false;
653 /* We need to check if we are within an inlined constructor
654 or destructor (ideally we would have a way to check that the
655 inlined cdtor is actually working on ARG, but we don't have
656 an easy tie on this, so punt on all non-pure cdtors.
657 We may also record the types of cdtors and, once we know the type
658 of the instance, match them.
659 
660 Also, code unification optimizations may merge calls from
661 different blocks, making return values unreliable. So
662 do nothing during late optimization. */
663 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
664 return true;
665 if (TREE_CODE (arg) == SSA_NAME
666 && SSA_NAME_IS_DEFAULT_DEF (arg)
667 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
668 {
669 /* Normal (non-THIS) argument. */
670 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
671 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
672 /* THIS pointer of a method - here we want to watch constructors
673 and destructors as those definitely may change the dynamic
674 type. */
675 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
676 && !DECL_CXX_CONSTRUCTOR_P (function)
677 && !DECL_CXX_DESTRUCTOR_P (function)
678 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
679 {
680 /* Walk the inline stack and watch out for ctors/dtors. */
681 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
682 block = BLOCK_SUPERCONTEXT (block))
683 if (BLOCK_ABSTRACT_ORIGIN (block)
684 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
685 {
686 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
687
688 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
689 continue;
690 if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
691 && (DECL_CXX_CONSTRUCTOR_P (fn)
692 || DECL_CXX_DESTRUCTOR_P (fn)))
693 return true;
694 }
695 return false;
696 }
697 }
698 return true;
699 }
700
701 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
702 callsite CALL) by looking for assignments to its virtual table pointer. If
703 it has, return true and fill in the jump function JFUNC with relevant type
704 information or set it to unknown. ARG is the object itself (not a pointer
705 to it, unless dereferenced). BASE is the base of the memory access as
706 returned by get_ref_base_and_extent, as is the offset.
707
708 This is a helper function for detect_type_change and detect_type_change_ssa
709 that does the heavy work which is usually unnecessary. */
710
711 static bool
712 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
713 gimple call, struct ipa_jump_func *jfunc,
714 HOST_WIDE_INT offset)
715 {
716 struct prop_type_change_info tci;
717 ao_ref ao;
718 bool entry_reached = false;
719
720 gcc_checking_assert (DECL_P (arg)
721 || TREE_CODE (arg) == MEM_REF
722 || handled_component_p (arg));
723
724 comp_type = TYPE_MAIN_VARIANT (comp_type);
725
726 /* Const calls cannot call virtual methods through VMT and so type changes do
727 not matter. */
728 if (!flag_devirtualize || !gimple_vuse (call)
729 /* Be sure expected_type is polymorphic. */
730 || !comp_type
731 || TREE_CODE (comp_type) != RECORD_TYPE
732 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
733 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
734 return true;
735
736 ao_ref_init (&ao, arg);
737 ao.base = base;
738 ao.offset = offset;
739 ao.size = POINTER_SIZE;
740 ao.max_size = ao.size;
741
742 tci.offset = offset;
743 tci.object = get_base_address (arg);
744 tci.type_maybe_changed = false;
745
746 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
747 &tci, NULL, &entry_reached);
748 if (!tci.type_maybe_changed)
749 return false;
750
751 jfunc->type = IPA_JF_UNKNOWN;
752 return true;
753 }
754
755 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
756 If so, return true and fill in the jump function JFUNC with relevant type
757 information or set it to unknown. ARG is the object itself (not a pointer
758 to it, unless dereferenced). BASE is the base of the memory access as
759 returned by get_ref_base_and_extent, as is the offset. */
760
761 static bool
762 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
763 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
764 {
765 if (!flag_devirtualize)
766 return false;
767
768 if (TREE_CODE (base) == MEM_REF
769 && !param_type_may_change_p (current_function_decl,
770 TREE_OPERAND (base, 0),
771 call))
772 return false;
773 return detect_type_change_from_memory_writes (arg, base, comp_type,
774 call, jfunc, offset);
775 }
776
777 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
778 SSA name (its dereference will become the base and the offset is assumed to
779 be zero). */
780
781 static bool
782 detect_type_change_ssa (tree arg, tree comp_type,
783 gimple call, struct ipa_jump_func *jfunc)
784 {
785 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
786 if (!flag_devirtualize
787 || !POINTER_TYPE_P (TREE_TYPE (arg)))
788 return false;
789
790 if (!param_type_may_change_p (current_function_decl, arg, call))
791 return false;
792
793 arg = build2 (MEM_REF, ptr_type_node, arg,
794 build_int_cst (ptr_type_node, 0));
795
796 return detect_type_change_from_memory_writes (arg, arg, comp_type,
797 call, jfunc, 0);
798 }
799
800 /* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
801 the boolean variable pointed to by DATA. */
802
803 static bool
804 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
805 void *data)
806 {
807 bool *b = (bool *) data;
808 *b = true;
809 return true;
810 }
811
812 /* Return true if we have already walked so many statements in AA that we
813 should really just start giving up. */
814
815 static bool
816 aa_overwalked (struct func_body_info *fbi)
817 {
818 gcc_checking_assert (fbi);
819 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
820 }
821
822 /* Find the nearest valid aa status for parameter specified by INDEX that
823 dominates BB. */
824
825 static struct param_aa_status *
826 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
827 int index)
828 {
829 while (true)
830 {
831 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
832 if (!bb)
833 return NULL;
834 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
835 if (!bi->param_aa_statuses.is_empty ()
836 && bi->param_aa_statuses[index].valid)
837 return &bi->param_aa_statuses[index];
838 }
839 }
840
841 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
842 structures and/or initialize the result with a dominating description as
843 necessary. */
844
845 static struct param_aa_status *
846 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
847 int index)
848 {
849 gcc_checking_assert (fbi);
850 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
851 if (bi->param_aa_statuses.is_empty ())
852 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
853 struct param_aa_status *paa = &bi->param_aa_statuses[index];
854 if (!paa->valid)
855 {
856 gcc_checking_assert (!paa->parm_modified
857 && !paa->ref_modified
858 && !paa->pt_modified);
859 struct param_aa_status *dom_paa;
860 dom_paa = find_dominating_aa_status (fbi, bb, index);
861 if (dom_paa)
862 *paa = *dom_paa;
863 else
864 paa->valid = true;
865 }
866
867 return paa;
868 }
869
870 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
871 a value known not to be modified in this function before reaching the
872 statement STMT. FBI holds information about the function we have gathered
873 so far, which does not survive the summary building stage. */
874
875 static bool
876 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
877 gimple stmt, tree parm_load)
878 {
879 struct param_aa_status *paa;
880 bool modified = false;
881 ao_ref refd;
882
883 /* FIXME: FBI can be NULL if we are being called from outside
884 ipa_node_analysis or ipcp_transform_function, which currently happens
885 during inlining analysis. It would be great to extend fbi's lifetime and
886 always have it. Currently, we are just not afraid of too much walking in
887 that case. */
888 if (fbi)
889 {
890 if (aa_overwalked (fbi))
891 return false;
892 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
893 if (paa->parm_modified)
894 return false;
895 }
896 else
897 paa = NULL;
898
899 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
900 ao_ref_init (&refd, parm_load);
901 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
902 &modified, NULL);
903 if (fbi)
904 fbi->aa_walked += walked;
905 if (paa && modified)
906 paa->parm_modified = true;
907 return !modified;
908 }
909
910 /* If STMT is an assignment that loads a value from a parameter declaration
911 that has not been modified, return the index of the parameter in
912 ipa_node_params. Otherwise return -1. */
913
914 static int
915 load_from_unmodified_param (struct func_body_info *fbi,
916 vec<ipa_param_descriptor> descriptors,
917 gimple stmt)
918 {
919 int index;
920 tree op1;
921
922 if (!gimple_assign_single_p (stmt))
923 return -1;
924
925 op1 = gimple_assign_rhs1 (stmt);
926 if (TREE_CODE (op1) != PARM_DECL)
927 return -1;
928
929 index = ipa_get_param_decl_index_1 (descriptors, op1);
930 if (index < 0
931 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
932 return -1;
933
934 return index;
935 }
936
937 /* Return true if memory reference REF (which must be a load through parameter
938 with INDEX) loads data that are known to be unmodified in this function
939 before reaching statement STMT. */
940
941 static bool
942 parm_ref_data_preserved_p (struct func_body_info *fbi,
943 int index, gimple stmt, tree ref)
944 {
945 struct param_aa_status *paa;
946 bool modified = false;
947 ao_ref refd;
948
949 /* FIXME: FBI can be NULL if we are being called from outside
950 ipa_node_analysis or ipcp_transform_function, which currently happens
951 during inlining analysis. It would be great to extend fbi's lifetime and
952 always have it. Currently, we are just not afraid of too much walking in
953 that case. */
954 if (fbi)
955 {
956 if (aa_overwalked (fbi))
957 return false;
958 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
959 if (paa->ref_modified)
960 return false;
961 }
962 else
963 paa = NULL;
964
965 gcc_checking_assert (gimple_vuse (stmt));
966 ao_ref_init (&refd, ref);
967 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
968 &modified, NULL);
969 if (fbi)
970 fbi->aa_walked += walked;
971 if (paa && modified)
972 paa->ref_modified = true;
973 return !modified;
974 }
975
976 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
977 is known to be unmodified in this function before reaching call statement
978 CALL into which it is passed. FBI describes the function body. */
979
980 static bool
981 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
982 gimple call, tree parm)
983 {
984 bool modified = false;
985 ao_ref refd;
986
987 /* It's unnecessary to calculate anything about memory contents for a const
988 function because it is not going to use it. But do not cache the result
989 either. Also, no such calculations for non-pointers. */
990 if (!gimple_vuse (call)
991 || !POINTER_TYPE_P (TREE_TYPE (parm))
992 || aa_overwalked (fbi))
993 return false;
994
995 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
996 index);
997 if (paa->pt_modified)
998 return false;
999
1000 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1001 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1002 &modified, NULL);
1003 fbi->aa_walked += walked;
1004 if (modified)
1005 paa->pt_modified = true;
1006 return !modified;
1007 }
1008
1009 /* Return true if we can prove that OP is a memory reference loading unmodified
1010 data from an aggregate passed as a parameter and if the aggregate is passed
1011 by reference, that the alias type of the load corresponds to the type of the
1012 formal parameter (so that we can rely on this type for TBAA in callers).
1013 DESCRIPTORS and FBI describe parameters of the current function (but FBI
1014 can be NULL), STMT is the load statement. If the function returns true,
1015 *INDEX_P, *OFFSET_P and *BY_REF are filled with the parameter index, offset
1016 within the aggregate and whether it is a load from a value passed by
1017 reference respectively. */
1018
1019 static bool
1020 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1021 vec<ipa_param_descriptor> descriptors,
1022 gimple stmt, tree op, int *index_p,
1023 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1024 bool *by_ref_p)
1025 {
1026 int index;
1027 HOST_WIDE_INT size, max_size;
1028 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1029
1030 if (max_size == -1 || max_size != size || *offset_p < 0)
1031 return false;
1032
1033 if (DECL_P (base))
1034 {
1035 int index = ipa_get_param_decl_index_1 (descriptors, base);
1036 if (index >= 0
1037 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1038 {
1039 *index_p = index;
1040 *by_ref_p = false;
1041 if (size_p)
1042 *size_p = size;
1043 return true;
1044 }
1045 return false;
1046 }
1047
1048 if (TREE_CODE (base) != MEM_REF
1049 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1050 || !integer_zerop (TREE_OPERAND (base, 1)))
1051 return false;
1052
1053 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1054 {
1055 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1056 index = ipa_get_param_decl_index_1 (descriptors, parm);
1057 }
1058 else
1059 {
1060 /* This branch catches situations where a pointer parameter is not a
1061 gimple register, for example:
1062
1063 void hip7(S*) (struct S * p)
1064 {
1065 void (*<T2e4>) (struct S *) D.1867;
1066 struct S * p.1;
1067
1068 <bb 2>:
1069 p.1_1 = p;
1070 D.1867_2 = p.1_1->f;
1071 D.1867_2 ();
1072 gdp = &p;
1073 */
1074
1075 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1076 index = load_from_unmodified_param (fbi, descriptors, def);
1077 }
1078
1079 if (index >= 0
1080 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1081 {
1082 *index_p = index;
1083 *by_ref_p = true;
1084 if (size_p)
1085 *size_p = size;
1086 return true;
1087 }
1088 return false;
1089 }
1090
1091 /* Just like the previous function, only without the func_body_info
1092 pointer, for users outside of this file. */
1093
1094 bool
1095 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1096 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1097 bool *by_ref_p)
1098 {
1099 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1100 offset_p, NULL, by_ref_p);
1101 }
1102
1103 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1104 of an assignment statement STMT, try to determine whether we are actually
1105 handling any of the following cases and construct an appropriate jump
1106 function into JFUNC if so:
1107
1108 1) The passed value is loaded from a formal parameter which is not a gimple
1109 register (most probably because it is addressable, the value has to be
1110 scalar) and we can guarantee the value has not changed. This case can
1111 therefore be described by a simple pass-through jump function. For example:
1112
1113 foo (int a)
1114 {
1115 int a.0;
1116
1117 a.0_2 = a;
1118 bar (a.0_2);
1119
1120 2) The passed value can be described by a simple arithmetic pass-through
1121 jump function. E.g.
1122
1123 foo (int a)
1124 {
1125 int D.2064;
1126
1127 D.2064_4 = a.1(D) + 4;
1128 bar (D.2064_4);
1129
1130 This case can also occur in combination with the previous one, e.g.:
1131
1132 foo (int a, int z)
1133 {
1134 int a.0;
1135 int D.2064;
1136
1137 a.0_3 = a;
1138 D.2064_4 = a.0_3 + 4;
1139 foo (D.2064_4);
1140
1141 3) The passed value is an address of an object within another one (which
1142 is also passed by reference). Such situations are described by an ancestor
1143 jump function and arise in code such as:
1144
1145 B::foo() (struct B * const this)
1146 {
1147 struct A * D.1845;
1148
1149 D.1845_2 = &this_1(D)->D.1748;
1150 A::bar (D.1845_2);
1151
1152 INFO is the structure describing individual parameters across different
1153 stages of IPA optimizations. PARMS_AINFO contains the information that is
1154 only needed for intraprocedural analysis. */
1155
1156 static void
1157 compute_complex_assign_jump_func (struct func_body_info *fbi,
1158 struct ipa_node_params *info,
1159 struct ipa_jump_func *jfunc,
1160 gimple call, gimple stmt, tree name,
1161 tree param_type)
1162 {
1163 HOST_WIDE_INT offset, size, max_size;
1164 tree op1, tc_ssa, base, ssa;
1165 int index;
1166
1167 op1 = gimple_assign_rhs1 (stmt);
1168
1169 if (TREE_CODE (op1) == SSA_NAME)
1170 {
1171 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1172 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1173 else
1174 index = load_from_unmodified_param (fbi, info->descriptors,
1175 SSA_NAME_DEF_STMT (op1));
1176 tc_ssa = op1;
1177 }
1178 else
1179 {
1180 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1181 tc_ssa = gimple_assign_lhs (stmt);
1182 }
1183
1184 if (index >= 0)
1185 {
1186 tree op2 = gimple_assign_rhs2 (stmt);
1187
1188 if (op2)
1189 {
1190 if (!is_gimple_ip_invariant (op2)
1191 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1192 && !useless_type_conversion_p (TREE_TYPE (name),
1193 TREE_TYPE (op1))))
1194 return;
1195
1196 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1197 gimple_assign_rhs_code (stmt));
1198 }
1199 else if (gimple_assign_single_p (stmt))
1200 {
1201 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1202 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1203 }
1204 return;
1205 }
1206
1207 if (TREE_CODE (op1) != ADDR_EXPR)
1208 return;
1209 op1 = TREE_OPERAND (op1, 0);
1210 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1211 return;
1212 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1213 if (TREE_CODE (base) != MEM_REF
1214 /* If this is a varying address, punt. */
1215 || max_size == -1
1216 || max_size != size)
1217 return;
1218 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1219 ssa = TREE_OPERAND (base, 0);
1220 if (TREE_CODE (ssa) != SSA_NAME
1221 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1222 || offset < 0)
1223 return;
1224
1225 /* Dynamic types are changed in constructors and destructors. */
1226 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1227 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1228 ipa_set_ancestor_jf (jfunc, offset, index,
1229 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1230 }
1231
1232 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1233 it looks like:
1234
1235 iftmp.1_3 = &obj_2(D)->D.1762;
1236
1237 The base of the MEM_REF must be a default definition SSA NAME of a
1238 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1239 whole MEM_REF expression is returned and the offset calculated from any
1240 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1241 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1242
1243 static tree
1244 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1245 {
1246 HOST_WIDE_INT size, max_size;
1247 tree expr, parm, obj;
1248
1249 if (!gimple_assign_single_p (assign))
1250 return NULL_TREE;
1251 expr = gimple_assign_rhs1 (assign);
1252
1253 if (TREE_CODE (expr) != ADDR_EXPR)
1254 return NULL_TREE;
1255 expr = TREE_OPERAND (expr, 0);
1256 obj = expr;
1257 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1258
1259 if (TREE_CODE (expr) != MEM_REF
1260 /* If this is a varying address, punt. */
1261 || max_size == -1
1262 || max_size != size
1263 || *offset < 0)
1264 return NULL_TREE;
1265 parm = TREE_OPERAND (expr, 0);
1266 if (TREE_CODE (parm) != SSA_NAME
1267 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1268 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1269 return NULL_TREE;
1270
1271 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1272 *obj_p = obj;
1273 return expr;
1274 }
1275
1276
1277 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1278 statement PHI, try to find out whether NAME is in fact a
1279 multiple-inheritance typecast from a descendant into an ancestor of a formal
1280 parameter and thus can be described by an ancestor jump function and if so,
1281 write the appropriate function into JFUNC.
1282
1283 Essentially we want to match the following pattern:
1284
1285 if (obj_2(D) != 0B)
1286 goto <bb 3>;
1287 else
1288 goto <bb 4>;
1289
1290 <bb 3>:
1291 iftmp.1_3 = &obj_2(D)->D.1762;
1292
1293 <bb 4>:
1294 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1295 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1296 return D.1879_6; */
1297
1298 static void
1299 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1300 struct ipa_node_params *info,
1301 struct ipa_jump_func *jfunc,
1302 gimple call, gimple phi)
1303 {
1304 HOST_WIDE_INT offset;
1305 gimple assign, cond;
1306 basic_block phi_bb, assign_bb, cond_bb;
1307 tree tmp, parm, expr, obj;
1308 int index, i;
1309
1310 if (gimple_phi_num_args (phi) != 2)
1311 return;
1312
1313 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1314 tmp = PHI_ARG_DEF (phi, 0);
1315 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1316 tmp = PHI_ARG_DEF (phi, 1);
1317 else
1318 return;
1319 if (TREE_CODE (tmp) != SSA_NAME
1320 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1321 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1322 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1323 return;
1324
1325 assign = SSA_NAME_DEF_STMT (tmp);
1326 assign_bb = gimple_bb (assign);
1327 if (!single_pred_p (assign_bb))
1328 return;
1329 expr = get_ancestor_addr_info (assign, &obj, &offset);
1330 if (!expr)
1331 return;
1332 parm = TREE_OPERAND (expr, 0);
1333 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1334 if (index < 0)
1335 return;
1336
1337 cond_bb = single_pred (assign_bb);
1338 cond = last_stmt (cond_bb);
1339 if (!cond
1340 || gimple_code (cond) != GIMPLE_COND
1341 || gimple_cond_code (cond) != NE_EXPR
1342 || gimple_cond_lhs (cond) != parm
1343 || !integer_zerop (gimple_cond_rhs (cond)))
1344 return;
1345
1346 phi_bb = gimple_bb (phi);
1347 for (i = 0; i < 2; i++)
1348 {
1349 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1350 if (pred != assign_bb && pred != cond_bb)
1351 return;
1352 }
1353
1354 ipa_set_ancestor_jf (jfunc, offset, index,
1355 parm_ref_data_pass_through_p (fbi, index, call, parm));
1356 }
1357
1358 /* Inspect the given TYPE and return true iff it has the same structure (the
1359 same number of fields of the same types) as a C++ member pointer. If
1360 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1361 corresponding fields there. */
1362
1363 static bool
1364 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1365 {
1366 tree fld;
1367
1368 if (TREE_CODE (type) != RECORD_TYPE)
1369 return false;
1370
1371 fld = TYPE_FIELDS (type);
1372 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1373 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1374 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1375 return false;
1376
1377 if (method_ptr)
1378 *method_ptr = fld;
1379
1380 fld = DECL_CHAIN (fld);
1381 if (!fld || INTEGRAL_TYPE_P (fld)
1382 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1383 return false;
1384 if (delta)
1385 *delta = fld;
1386
1387 if (DECL_CHAIN (fld))
1388 return false;
1389
1390 return true;
1391 }
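
/* For reference, the layout matched above is roughly that of the common C++
   member-function-pointer representation; a hypothetical C-style sketch:

     struct fake_member_ptr
     {
       void (*pfn) (struct S *);   <- pointer to the method
       ptrdiff_t delta;            <- adjustment of the THIS pointer
     };  */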
1392
1393 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1394 return the rhs of its defining statement. Otherwise return RHS as it
1395 is. */
1396
1397 static inline tree
1398 get_ssa_def_if_simple_copy (tree rhs)
1399 {
1400 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1401 {
1402 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1403
1404 if (gimple_assign_single_p (def_stmt))
1405 rhs = gimple_assign_rhs1 (def_stmt);
1406 else
1407 break;
1408 }
1409 return rhs;
1410 }
1411
1412 /* Simple linked list, describing known contents of an aggregate before the
1413 call. */
1414
1415 struct ipa_known_agg_contents_list
1416 {
1417 /* Offset and size of the described part of the aggregate. */
1418 HOST_WIDE_INT offset, size;
1419 /* Known constant value or NULL if the contents are known to be unknown. */
1420 tree constant;
1421 /* Pointer to the next structure in the list. */
1422 struct ipa_known_agg_contents_list *next;
1423 };
1424
1425 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1426 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1427 unless there is a partial overlap, in which case return NULL, or such
1428 element is already there, in which case set *ALREADY_THERE to true. */
1429
1430 static struct ipa_known_agg_contents_list **
1431 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1432 HOST_WIDE_INT lhs_offset,
1433 HOST_WIDE_INT lhs_size,
1434 bool *already_there)
1435 {
1436 struct ipa_known_agg_contents_list **p = list;
1437 while (*p && (*p)->offset < lhs_offset)
1438 {
1439 if ((*p)->offset + (*p)->size > lhs_offset)
1440 return NULL;
1441 p = &(*p)->next;
1442 }
1443
1444 if (*p && (*p)->offset < lhs_offset + lhs_size)
1445 {
1446 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1447 /* We already know this value is subsequently overwritten with
1448 something else. */
1449 *already_there = true;
1450 else
1451 /* Otherwise this is a partial overlap which we cannot
1452 represent. */
1453 return NULL;
1454 }
1455 return p;
1456 }
1457
1458 /* Build aggregate jump function from LIST, assuming there are exactly
1459 CONST_COUNT constant entries there and that the offset of the passed argument
1460 is ARG_OFFSET and store it into JFUNC. */
1461
1462 static void
1463 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1464 int const_count, HOST_WIDE_INT arg_offset,
1465 struct ipa_jump_func *jfunc)
1466 {
1467 vec_alloc (jfunc->agg.items, const_count);
1468 while (list)
1469 {
1470 if (list->constant)
1471 {
1472 struct ipa_agg_jf_item item;
1473 item.offset = list->offset - arg_offset;
1474 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1475 item.value = unshare_expr_without_location (list->constant);
1476 jfunc->agg.items->quick_push (item);
1477 }
1478 list = list->next;
1479 }
1480 }
1481
1482 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1483 in ARG is filled in with constant values. ARG can either be an aggregate
1484 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1485 aggregate. JFUNC is the jump function into which the constants are
1486 subsequently stored. */
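
/* For instance, in a (hypothetical) fragment like

     struct S s;
     s.a = 1;
     s.b = 2;
     bar (&s);

   both stores would be recorded and become aggregate jump function items
   describing the known contents of the aggregate at the time of the call.  */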
1487
1488 static void
1489 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1490 struct ipa_jump_func *jfunc)
1491 {
1492 struct ipa_known_agg_contents_list *list = NULL;
1493 int item_count = 0, const_count = 0;
1494 HOST_WIDE_INT arg_offset, arg_size;
1495 gimple_stmt_iterator gsi;
1496 tree arg_base;
1497 bool check_ref, by_ref;
1498 ao_ref r;
1499
1500 /* The function operates in three stages. First, we prepare check_ref, r,
1501 arg_base and arg_offset based on what is actually passed as an
1502 argument. */
1503
1504 if (POINTER_TYPE_P (arg_type))
1505 {
1506 by_ref = true;
1507 if (TREE_CODE (arg) == SSA_NAME)
1508 {
1509 tree type_size;
1510 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1511 return;
1512 check_ref = true;
1513 arg_base = arg;
1514 arg_offset = 0;
1515 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1516 arg_size = tree_to_uhwi (type_size);
1517 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1518 }
1519 else if (TREE_CODE (arg) == ADDR_EXPR)
1520 {
1521 HOST_WIDE_INT arg_max_size;
1522
1523 arg = TREE_OPERAND (arg, 0);
1524 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1525 &arg_max_size);
1526 if (arg_max_size == -1
1527 || arg_max_size != arg_size
1528 || arg_offset < 0)
1529 return;
1530 if (DECL_P (arg_base))
1531 {
1532 check_ref = false;
1533 ao_ref_init (&r, arg_base);
1534 }
1535 else
1536 return;
1537 }
1538 else
1539 return;
1540 }
1541 else
1542 {
1543 HOST_WIDE_INT arg_max_size;
1544
1545 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1546
1547 by_ref = false;
1548 check_ref = false;
1549 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1550 &arg_max_size);
1551 if (arg_max_size == -1
1552 || arg_max_size != arg_size
1553 || arg_offset < 0)
1554 return;
1555
1556 ao_ref_init (&r, arg);
1557 }
1558
1559 /* Second stage walks back the BB, looks at individual statements and as long
1560 as it is confident of how the statements affect contents of the
1561 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1562 describing it. */
1563 gsi = gsi_for_stmt (call);
1564 gsi_prev (&gsi);
1565 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1566 {
1567 struct ipa_known_agg_contents_list *n, **p;
1568 gimple stmt = gsi_stmt (gsi);
1569 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1570 tree lhs, rhs, lhs_base;
1571
1572 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1573 continue;
1574 if (!gimple_assign_single_p (stmt))
1575 break;
1576
1577 lhs = gimple_assign_lhs (stmt);
1578 rhs = gimple_assign_rhs1 (stmt);
1579 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1580 || TREE_CODE (lhs) == BIT_FIELD_REF
1581 || contains_bitfld_component_ref_p (lhs))
1582 break;
1583
1584 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1585 &lhs_max_size);
1586 if (lhs_max_size == -1
1587 || lhs_max_size != lhs_size)
1588 break;
1589
1590 if (check_ref)
1591 {
1592 if (TREE_CODE (lhs_base) != MEM_REF
1593 || TREE_OPERAND (lhs_base, 0) != arg_base
1594 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1595 break;
1596 }
1597 else if (lhs_base != arg_base)
1598 {
1599 if (DECL_P (lhs_base))
1600 continue;
1601 else
1602 break;
1603 }
1604
1605 bool already_there = false;
1606 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1607 &already_there);
1608 if (!p)
1609 break;
1610 if (already_there)
1611 continue;
1612
1613 rhs = get_ssa_def_if_simple_copy (rhs);
1614 n = XALLOCA (struct ipa_known_agg_contents_list);
1615 n->size = lhs_size;
1616 n->offset = lhs_offset;
1617 if (is_gimple_ip_invariant (rhs))
1618 {
1619 n->constant = rhs;
1620 const_count++;
1621 }
1622 else
1623 n->constant = NULL_TREE;
1624 n->next = *p;
1625 *p = n;
1626
1627 item_count++;
1628 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1629 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1630 break;
1631 }
1632
1633 /* Third stage just goes over the list and creates an appropriate vector of
1634 ipa_agg_jf_item structures out of it, of course only if there are
1635 any known constants to begin with. */
1636
1637 if (const_count)
1638 {
1639 jfunc->agg.by_ref = by_ref;
1640 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1641 }
1642 }
1643
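/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, looking first at the callee's declared prototype and falling back
   to its DECL_ARGUMENTS; return NULL if the type cannot be determined.  */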
1644 static tree
1645 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1646 {
1647 int n;
1648 tree type = (e->callee
1649 ? TREE_TYPE (e->callee->decl)
1650 : gimple_call_fntype (e->call_stmt));
1651 tree t = TYPE_ARG_TYPES (type);
1652
1653 for (n = 0; n < i; n++)
1654 {
1655 if (!t)
1656 break;
1657 t = TREE_CHAIN (t);
1658 }
1659 if (t)
1660 return TREE_VALUE (t);
1661 if (!e->callee)
1662 return NULL;
1663 t = DECL_ARGUMENTS (e->callee->decl);
1664 for (n = 0; n < i; n++)
1665 {
1666 if (!t)
1667 return NULL;
1668 t = TREE_CHAIN (t);
1669 }
1670 if (t)
1671 return TREE_TYPE (t);
1672 return NULL;
1673 }
1674
1675 /* Compute jump function for all arguments of callsite CS and insert the
1676 information in the jump_functions array in the ipa_edge_args corresponding
1677 to this callsite. */
1678
1679 static void
1680 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1681 struct cgraph_edge *cs)
1682 {
1683 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1684 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1685 gimple call = cs->call_stmt;
1686 int n, arg_num = gimple_call_num_args (call);
1687 bool useful_context = false;
1688
1689 if (arg_num == 0 || args->jump_functions)
1690 return;
1691 vec_safe_grow_cleared (args->jump_functions, arg_num);
1692 if (flag_devirtualize)
1693 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1694
1695 if (gimple_call_internal_p (call))
1696 return;
1697 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1698 return;
1699
1700 for (n = 0; n < arg_num; n++)
1701 {
1702 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1703 tree arg = gimple_call_arg (call, n);
1704 tree param_type = ipa_get_callee_param_type (cs, n);
1705 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1706 {
1707 tree instance;
1708 struct ipa_polymorphic_call_context context (cs->caller->decl,
1709 arg, cs->call_stmt,
1710 &instance);
1711 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1712 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1713 if (!context.useless_p ())
1714 useful_context = true;
1715 }
1716
1717 if (is_gimple_ip_invariant (arg))
1718 ipa_set_jf_constant (jfunc, arg, cs);
1719 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1720 && TREE_CODE (arg) == PARM_DECL)
1721 {
1722 int index = ipa_get_param_decl_index (info, arg);
1723
1724 gcc_assert (index >= 0);
1725 /* Aggregate passed by value, check for pass-through, otherwise we
1726 will attempt to fill in aggregate contents later in this
1727 loop. */
1728 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1729 {
1730 ipa_set_jf_simple_pass_through (jfunc, index, false);
1731 continue;
1732 }
1733 }
1734 else if (TREE_CODE (arg) == SSA_NAME)
1735 {
1736 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1737 {
1738 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1739 if (index >= 0)
1740 {
1741 bool agg_p;
1742 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1743 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1744 }
1745 }
1746 else
1747 {
1748 gimple stmt = SSA_NAME_DEF_STMT (arg);
1749 if (is_gimple_assign (stmt))
1750 compute_complex_assign_jump_func (fbi, info, jfunc,
1751 call, stmt, arg, param_type);
1752 else if (gimple_code (stmt) == GIMPLE_PHI)
1753 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1754 call, stmt);
1755 }
1756 }
1757
1758 /* If ARG is a pointer, we cannot use its type to determine the type of the
1759 aggregate passed (because type conversions are ignored in gimple). Usually
1760 we can safely get the type from the function declaration, but for K&R
1761 prototypes or variadic functions we can try our luck with the type of the
1762 pointer passed. TODO: Since we look for actual initialization of the memory
1763 object, we might do better to work out the type from the stores we find. */
1764 if (!param_type)
1765 param_type = TREE_TYPE (arg);
1766
1767 if ((jfunc->type != IPA_JF_PASS_THROUGH
1768 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1769 && (jfunc->type != IPA_JF_ANCESTOR
1770 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1771 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1772 || POINTER_TYPE_P (param_type)))
1773 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1774 }
1775 if (!useful_context)
1776 vec_free (args->polymorphic_call_contexts);
1777 }
1778
1779 /* Compute jump functions for all edges - both direct and indirect - outgoing
1780 from BB. */
1781
1782 static void
1783 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1784 {
1785 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1786 int i;
1787 struct cgraph_edge *cs;
1788
1789 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1790 {
1791 struct cgraph_node *callee = cs->callee;
1792
1793 if (callee)
1794 {
1795 callee->ultimate_alias_target ();
1796 /* We do not need to bother analyzing calls to unknown functions
1797 unless they may become known during lto/whopr. */
1798 if (!callee->definition && !flag_lto)
1799 continue;
1800 }
1801 ipa_compute_jump_functions_for_edge (fbi, cs);
1802 }
1803 }
1804
1805 /* If STMT looks like a statement loading a value from a member pointer formal
1806 parameter, return that parameter and store the offset of the field to
1807 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1808 might be clobbered). If USE_DELTA, then we look for a use of the delta
1809 field rather than the pfn. */
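/* Illustrative shapes of the loads this function matches (compare the full
member pointer example further below); the exact form depends on how the
access was lowered:

f$__pfn_24 = f.__pfn;                  <- COMPONENT_REF form
f$__pfn_24 = MEM[(struct *)&f + 4B];   <- MEM_REF-with-offset form

In both cases F must be a PARM_DECL whose type looks like a member pointer
(see type_like_member_ptr_p). */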
1810
1811 static tree
1812 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1813 HOST_WIDE_INT *offset_p)
1814 {
1815 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1816
1817 if (!gimple_assign_single_p (stmt))
1818 return NULL_TREE;
1819
1820 rhs = gimple_assign_rhs1 (stmt);
1821 if (TREE_CODE (rhs) == COMPONENT_REF)
1822 {
1823 ref_field = TREE_OPERAND (rhs, 1);
1824 rhs = TREE_OPERAND (rhs, 0);
1825 }
1826 else
1827 ref_field = NULL_TREE;
1828 if (TREE_CODE (rhs) != MEM_REF)
1829 return NULL_TREE;
1830 rec = TREE_OPERAND (rhs, 0);
1831 if (TREE_CODE (rec) != ADDR_EXPR)
1832 return NULL_TREE;
1833 rec = TREE_OPERAND (rec, 0);
1834 if (TREE_CODE (rec) != PARM_DECL
1835 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1836 return NULL_TREE;
1837 ref_offset = TREE_OPERAND (rhs, 1);
1838
1839 if (use_delta)
1840 fld = delta_field;
1841 else
1842 fld = ptr_field;
1843 if (offset_p)
1844 *offset_p = int_bit_position (fld);
1845
1846 if (ref_field)
1847 {
1848 if (integer_nonzerop (ref_offset))
1849 return NULL_TREE;
1850 return ref_field == fld ? rec : NULL_TREE;
1851 }
1852 else
1853 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1854 : NULL_TREE;
1855 }
1856
1857 /* Returns true iff T is an SSA_NAME defined by a statement. */
1858
1859 static bool
1860 ipa_is_ssa_with_stmt_def (tree t)
1861 {
1862 if (TREE_CODE (t) == SSA_NAME
1863 && !SSA_NAME_IS_DEFAULT_DEF (t))
1864 return true;
1865 else
1866 return false;
1867 }
1868
1869 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1870 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1871 indirect call graph edge. */
1872
1873 static struct cgraph_edge *
1874 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1875 {
1876 struct cgraph_edge *cs;
1877
1878 cs = node->get_edge (stmt);
1879 cs->indirect_info->param_index = param_index;
1880 cs->indirect_info->agg_contents = 0;
1881 cs->indirect_info->member_ptr = 0;
1882 return cs;
1883 }
1884
1885 /* Analyze the CALL (whose target is passed in TARGET) and examine uses of
1886 formal parameters of the caller FBI->node (described by FBI->info, which
1887 also holds per-parameter intermediate information). Currently it checks
1888 whether the call calls a pointer that is a formal parameter and if so, the
1889 parameter is marked with the called flag and an indirect call graph edge
1890 describing the call is created. This is very simple for ordinary pointers
1891 represented in SSA but not-so-nice when it comes to member pointers. The
1892 ugly part of this function does nothing more than trying to match the
1893 pattern of such a call. An example of such a pattern is the gimple dump
1894 below, the call is on the last line:
1895
1896 <bb 2>:
1897 f$__delta_5 = f.__delta;
1898 f$__pfn_24 = f.__pfn;
1899
1900 or
1901 <bb 2>:
1902 f$__delta_5 = MEM[(struct *)&f];
1903 f$__pfn_24 = MEM[(struct *)&f + 4B];
1904
1905 and a few lines below:
1906
1907 <bb 5>
1908 D.2496_3 = (int) f$__pfn_24;
1909 D.2497_4 = D.2496_3 & 1;
1910 if (D.2497_4 != 0)
1911 goto <bb 3>;
1912 else
1913 goto <bb 4>;
1914
1915 <bb 6>:
1916 D.2500_7 = (unsigned int) f$__delta_5;
1917 D.2501_8 = &S + D.2500_7;
1918 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1919 D.2503_10 = *D.2502_9;
1920 D.2504_12 = f$__pfn_24 + -1;
1921 D.2505_13 = (unsigned int) D.2504_12;
1922 D.2506_14 = D.2503_10 + D.2505_13;
1923 D.2507_15 = *D.2506_14;
1924 iftmp.11_16 = (String:: *) D.2507_15;
1925
1926 <bb 7>:
1927 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1928 D.2500_19 = (unsigned int) f$__delta_5;
1929 D.2508_20 = &S + D.2500_19;
1930 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1931
1932 Such patterns are results of simple calls to a member pointer:
1933
1934 int doprinting (int (MyString::* f)(int) const)
1935 {
1936 MyString S ("somestring");
1937
1938 return (S.*f)(4);
1939 }
1940
1941 Moreover, the function also looks for called pointers loaded from aggregates
1942 passed by value or reference. */
1943
1944 static void
1945 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
1946 tree target)
1947 {
1948 struct ipa_node_params *info = fbi->info;
1949 HOST_WIDE_INT offset;
1950 bool by_ref;
1951
1952 if (SSA_NAME_IS_DEFAULT_DEF (target))
1953 {
1954 tree var = SSA_NAME_VAR (target);
1955 int index = ipa_get_param_decl_index (info, var);
1956 if (index >= 0)
1957 ipa_note_param_call (fbi->node, index, call);
1958 return;
1959 }
1960
1961 int index;
1962 gimple def = SSA_NAME_DEF_STMT (target);
1963 if (gimple_assign_single_p (def)
1964 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
1965 gimple_assign_rhs1 (def), &index, &offset,
1966 NULL, &by_ref))
1967 {
1968 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1969 cs->indirect_info->offset = offset;
1970 cs->indirect_info->agg_contents = 1;
1971 cs->indirect_info->by_ref = by_ref;
1972 return;
1973 }
1974
1975 /* Now we need to try to match the complex pattern of calling a member
1976 pointer. */
1977 if (gimple_code (def) != GIMPLE_PHI
1978 || gimple_phi_num_args (def) != 2
1979 || !POINTER_TYPE_P (TREE_TYPE (target))
1980 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1981 return;
1982
1983 /* First, we need to check whether one of these is a load from a member
1984 pointer that is a parameter to this function. */
1985 tree n1 = PHI_ARG_DEF (def, 0);
1986 tree n2 = PHI_ARG_DEF (def, 1);
1987 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1988 return;
1989 gimple d1 = SSA_NAME_DEF_STMT (n1);
1990 gimple d2 = SSA_NAME_DEF_STMT (n2);
1991
1992 tree rec;
1993 basic_block bb, virt_bb;
1994 basic_block join = gimple_bb (def);
1995 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1996 {
1997 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1998 return;
1999
2000 bb = EDGE_PRED (join, 0)->src;
2001 virt_bb = gimple_bb (d2);
2002 }
2003 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2004 {
2005 bb = EDGE_PRED (join, 1)->src;
2006 virt_bb = gimple_bb (d1);
2007 }
2008 else
2009 return;
2010
2011 /* Second, we need to check that the basic blocks are laid out in the way
2012 corresponding to the pattern. */
2013
2014 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2015 || single_pred (virt_bb) != bb
2016 || single_succ (virt_bb) != join)
2017 return;
2018
2019 /* Third, check that the branch depends on the least significant bit
2020 of the pfn. */
2021
2022 gimple branch = last_stmt (bb);
2023 if (!branch || gimple_code (branch) != GIMPLE_COND)
2024 return;
2025
2026 if ((gimple_cond_code (branch) != NE_EXPR
2027 && gimple_cond_code (branch) != EQ_EXPR)
2028 || !integer_zerop (gimple_cond_rhs (branch)))
2029 return;
2030
2031 tree cond = gimple_cond_lhs (branch);
2032 if (!ipa_is_ssa_with_stmt_def (cond))
2033 return;
2034
2035 def = SSA_NAME_DEF_STMT (cond);
2036 if (!is_gimple_assign (def)
2037 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2038 || !integer_onep (gimple_assign_rhs2 (def)))
2039 return;
2040
2041 cond = gimple_assign_rhs1 (def);
2042 if (!ipa_is_ssa_with_stmt_def (cond))
2043 return;
2044
2045 def = SSA_NAME_DEF_STMT (cond);
2046
2047 if (is_gimple_assign (def)
2048 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2049 {
2050 cond = gimple_assign_rhs1 (def);
2051 if (!ipa_is_ssa_with_stmt_def (cond))
2052 return;
2053 def = SSA_NAME_DEF_STMT (cond);
2054 }
2055
2056 tree rec2;
2057 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2058 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2059 == ptrmemfunc_vbit_in_delta),
2060 NULL);
2061 if (rec != rec2)
2062 return;
2063
2064 index = ipa_get_param_decl_index (info, rec);
2065 if (index >= 0
2066 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2067 {
2068 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2069 cs->indirect_info->offset = offset;
2070 cs->indirect_info->agg_contents = 1;
2071 cs->indirect_info->member_ptr = 1;
2072 }
2073
2074 return;
2075 }
2076
2077 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2078 object referenced in the expression is a formal parameter of the caller
2079 FBI->node (described by FBI->info), create a call note for the
2080 statement. */
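/* As a hypothetical illustration, for a C++ caller such as

void call_it (A *a) { a->foo (); }

the call target is an OBJ_TYPE_REF whose OBJ_TYPE_REF_OBJECT is the SSA name
of the formal parameter A, so the code below attaches an indirect call note
carrying that parameter's index together with the OTR token and type. */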
2081
2082 static void
2083 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2084 gimple call, tree target)
2085 {
2086 tree obj = OBJ_TYPE_REF_OBJECT (target);
2087 int index;
2088 HOST_WIDE_INT anc_offset;
2089
2090 if (!flag_devirtualize)
2091 return;
2092
2093 if (TREE_CODE (obj) != SSA_NAME)
2094 return;
2095
2096 struct ipa_node_params *info = fbi->info;
2097 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2098 {
2099 struct ipa_jump_func jfunc;
2100 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2101 return;
2102
2103 anc_offset = 0;
2104 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2105 gcc_assert (index >= 0);
2106 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2107 call, &jfunc))
2108 return;
2109 }
2110 else
2111 {
2112 struct ipa_jump_func jfunc;
2113 gimple stmt = SSA_NAME_DEF_STMT (obj);
2114 tree expr;
2115
2116 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2117 if (!expr)
2118 return;
2119 index = ipa_get_param_decl_index (info,
2120 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2121 gcc_assert (index >= 0);
2122 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2123 call, &jfunc, anc_offset))
2124 return;
2125 }
2126
2127 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2128 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2129 ii->offset = anc_offset;
2130 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2131 ii->otr_type = obj_type_ref_class (target);
2132 ii->polymorphic = 1;
2133 }
2134
2135 /* Analyze call statement CALL to determine whether and how it utilizes formal
2136 parameters of the caller FBI->node (described by FBI->info, which also
2137 holds intermediate information about each formal parameter). */
2138
2139 static void
2140 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2141 {
2142 tree target = gimple_call_fn (call);
2143
2144 if (!target
2145 || (TREE_CODE (target) != SSA_NAME
2146 && !virtual_method_call_p (target)))
2147 return;
2148
2149 struct cgraph_edge *cs = fbi->node->get_edge (call);
2150 /* If we previously turned the call into a direct call, there is
2151 no need to analyze. */
2152 if (cs && !cs->indirect_unknown_callee)
2153 return;
2154
2155 if (cs->indirect_info->polymorphic)
2156 {
2157 tree instance;
2158 tree target = gimple_call_fn (call);
2159 ipa_polymorphic_call_context context (current_function_decl,
2160 target, call, &instance);
2161
2162 gcc_checking_assert (cs->indirect_info->otr_type
2163 == obj_type_ref_class (target));
2164 gcc_checking_assert (cs->indirect_info->otr_token
2165 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2166
2167 cs->indirect_info->vptr_changed
2168 = !context.get_dynamic_type (instance,
2169 OBJ_TYPE_REF_OBJECT (target),
2170 obj_type_ref_class (target), call);
2171 cs->indirect_info->context = context;
2172 }
2173
2174 if (TREE_CODE (target) == SSA_NAME)
2175 ipa_analyze_indirect_call_uses (fbi, call, target);
2176 else if (virtual_method_call_p (target))
2177 ipa_analyze_virtual_call_uses (fbi, call, target);
2178 }
2179
2180
2181 /* Analyze the statement STMT with respect to formal parameters (described
2182 in FBI->info) of the caller given by FBI->node. Currently it only checks
2183 whether formal parameters are called. */
2184
2185 static void
2186 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2187 {
2188 if (is_gimple_call (stmt))
2189 ipa_analyze_call_uses (fbi, stmt);
2190 }
2191
2192 /* Callback of walk_stmt_load_store_addr_ops, used below for loads, stores
2193 and taken addresses alike. If OP is a parameter declaration, mark it as
2194 used in the info structure passed in DATA. */
2195
2196 static bool
2197 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2198 {
2199 struct ipa_node_params *info = (struct ipa_node_params *) data;
2200
2201 op = get_base_address (op);
2202 if (op
2203 && TREE_CODE (op) == PARM_DECL)
2204 {
2205 int index = ipa_get_param_decl_index (info, op);
2206 gcc_assert (index >= 0);
2207 ipa_set_param_used (info, index, true);
2208 }
2209
2210 return false;
2211 }
2212
2213 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2214 the findings in various structures of the associated ipa_node_params
2215 structure, such as parameter flags, notes etc. FBI holds various data about
2216 the function being analyzed. */
2217
2218 static void
2219 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2220 {
2221 gimple_stmt_iterator gsi;
2222 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2223 {
2224 gimple stmt = gsi_stmt (gsi);
2225
2226 if (is_gimple_debug (stmt))
2227 continue;
2228
2229 ipa_analyze_stmt_uses (fbi, stmt);
2230 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2231 visit_ref_for_mod_analysis,
2232 visit_ref_for_mod_analysis,
2233 visit_ref_for_mod_analysis);
2234 }
2235 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2236 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2237 visit_ref_for_mod_analysis,
2238 visit_ref_for_mod_analysis,
2239 visit_ref_for_mod_analysis);
2240 }
2241
2242 /* Calculate controlled uses of parameters of NODE. */
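/* Roughly speaking (the loop below has the precise rule): a use of a
parameter is "controlled" when it occurs in a call statement. In a sketch
like

void foo (void (*cb) (int)) { cb (1); helper (cb); }

CB has two controlled uses, whereas any non-call, non-debug use of its
default definition makes the count IPA_UNDESCRIBED_USE. HELPER is just a
made-up function for illustration. */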
2243
2244 static void
2245 ipa_analyze_controlled_uses (struct cgraph_node *node)
2246 {
2247 struct ipa_node_params *info = IPA_NODE_REF (node);
2248
2249 for (int i = 0; i < ipa_get_param_count (info); i++)
2250 {
2251 tree parm = ipa_get_param (info, i);
2252 int controlled_uses = 0;
2253
2254 /* For SSA regs see if parameter is used. For non-SSA we compute
2255 the flag during modification analysis. */
2256 if (is_gimple_reg (parm))
2257 {
2258 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2259 parm);
2260 if (ddef && !has_zero_uses (ddef))
2261 {
2262 imm_use_iterator imm_iter;
2263 use_operand_p use_p;
2264
2265 ipa_set_param_used (info, i, true);
2266 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2267 if (!is_gimple_call (USE_STMT (use_p)))
2268 {
2269 if (!is_gimple_debug (USE_STMT (use_p)))
2270 {
2271 controlled_uses = IPA_UNDESCRIBED_USE;
2272 break;
2273 }
2274 }
2275 else
2276 controlled_uses++;
2277 }
2278 else
2279 controlled_uses = 0;
2280 }
2281 else
2282 controlled_uses = IPA_UNDESCRIBED_USE;
2283 ipa_set_controlled_uses (info, i, controlled_uses);
2284 }
2285 }
2286
2287 /* Free the data stored in BI. */
2288
2289 static void
2290 free_ipa_bb_info (struct ipa_bb_info *bi)
2291 {
2292 bi->cg_edges.release ();
2293 bi->param_aa_statuses.release ();
2294 }
2295
2296 /* Dominator walker driving the analysis. */
2297
2298 class analysis_dom_walker : public dom_walker
2299 {
2300 public:
2301 analysis_dom_walker (struct func_body_info *fbi)
2302 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2303
2304 virtual void before_dom_children (basic_block);
2305
2306 private:
2307 struct func_body_info *m_fbi;
2308 };
2309
2310 void
2311 analysis_dom_walker::before_dom_children (basic_block bb)
2312 {
2313 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2314 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2315 }
2316
2317 /* Initialize the array describing properties of formal parameters
2318 of NODE, analyze their uses and compute jump functions associated
2319 with actual arguments of calls from within NODE. */
2320
2321 void
2322 ipa_analyze_node (struct cgraph_node *node)
2323 {
2324 struct func_body_info fbi;
2325 struct ipa_node_params *info;
2326
2327 ipa_check_create_node_params ();
2328 ipa_check_create_edge_args ();
2329 info = IPA_NODE_REF (node);
2330
2331 if (info->analysis_done)
2332 return;
2333 info->analysis_done = 1;
2334
2335 if (ipa_func_spec_opts_forbid_analysis_p (node))
2336 {
2337 for (int i = 0; i < ipa_get_param_count (info); i++)
2338 {
2339 ipa_set_param_used (info, i, true);
2340 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2341 }
2342 return;
2343 }
2344
2345 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2346 push_cfun (func);
2347 calculate_dominance_info (CDI_DOMINATORS);
2348 ipa_initialize_node_params (node);
2349 ipa_analyze_controlled_uses (node);
2350
2351 fbi.node = node;
2352 fbi.info = IPA_NODE_REF (node);
2353 fbi.bb_infos = vNULL;
2354 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2355 fbi.param_count = ipa_get_param_count (info);
2356 fbi.aa_walked = 0;
2357
2358 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2359 {
2360 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2361 bi->cg_edges.safe_push (cs);
2362 }
2363
2364 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2365 {
2366 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2367 bi->cg_edges.safe_push (cs);
2368 }
2369
2370 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2371
2372 int i;
2373 struct ipa_bb_info *bi;
2374 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2375 free_ipa_bb_info (bi);
2376 fbi.bb_infos.release ();
2377 free_dominance_info (CDI_DOMINATORS);
2378 pop_cfun ();
2379 }
2380
2381 /* Update the jump functions associated with call graph edge E when the call
2382 graph edge CS is being inlined, assuming that E->caller is already (possibly
2383 indirectly) inlined into CS->callee and that E has not been inlined. */
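/* A hypothetical sketch of the pass-through composition performed below:

void inner (int j) { use_it (j); }   <- edge E, its argument is a
                                        pass-through of formal J
void mid (int k) { inner (k); }      <- edge CS, argument at position J is
                                        a pass-through of formal K

After CS is inlined, E's argument becomes a simple pass-through of MID's
formal K, with agg_preserved being the conjunction of both original flags.
USE_IT is a made-up callee for illustration. */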
2384
2385 static void
2386 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2387 struct cgraph_edge *e)
2388 {
2389 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2390 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2391 int count = ipa_get_cs_argument_count (args);
2392 int i;
2393
2394 for (i = 0; i < count; i++)
2395 {
2396 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2397 struct ipa_polymorphic_call_context *dst_ctx
2398 = ipa_get_ith_polymorhic_call_context (args, i);
2399
2400 if (dst->type == IPA_JF_ANCESTOR)
2401 {
2402 struct ipa_jump_func *src;
2403 int dst_fid = dst->value.ancestor.formal_id;
2404 struct ipa_polymorphic_call_context *src_ctx
2405 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2406
2407 /* A variable number of arguments can cause havoc if we try to access
2408 an argument that does not exist in the inlined edge, so make sure
2409 we don't. */
2410 if (dst_fid >= ipa_get_cs_argument_count (top))
2411 {
2412 dst->type = IPA_JF_UNKNOWN;
2413 continue;
2414 }
2415
2416 src = ipa_get_ith_jump_func (top, dst_fid);
2417
2418 if (src_ctx && !src_ctx->useless_p ())
2419 {
2420 struct ipa_polymorphic_call_context ctx = *src_ctx;
2421
2422 /* TODO: Make type preserved safe WRT contexts. */
2423 if (!ipa_get_jf_ancestor_type_preserved (dst))
2424 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2425 ctx.offset_by (dst->value.ancestor.offset);
2426 if (!ctx.useless_p ())
2427 {
2428 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2429 count);
2430 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2431 }
2432 dst_ctx->combine_with (ctx);
2433 }
2434
2435 if (src->agg.items
2436 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2437 {
2438 struct ipa_agg_jf_item *item;
2439 int j;
2440
2441 /* Currently we do not produce clobber aggregate jump functions,
2442 replace with merging when we do. */
2443 gcc_assert (!dst->agg.items);
2444
2445 dst->agg.items = vec_safe_copy (src->agg.items);
2446 dst->agg.by_ref = src->agg.by_ref;
2447 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2448 item->offset -= dst->value.ancestor.offset;
2449 }
2450
2451 if (src->type == IPA_JF_PASS_THROUGH
2452 && src->value.pass_through.operation == NOP_EXPR)
2453 {
2454 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2455 dst->value.ancestor.agg_preserved &=
2456 src->value.pass_through.agg_preserved;
2457 }
2458 else if (src->type == IPA_JF_ANCESTOR)
2459 {
2460 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2461 dst->value.ancestor.offset += src->value.ancestor.offset;
2462 dst->value.ancestor.agg_preserved &=
2463 src->value.ancestor.agg_preserved;
2464 }
2465 else
2466 dst->type = IPA_JF_UNKNOWN;
2467 }
2468 else if (dst->type == IPA_JF_PASS_THROUGH)
2469 {
2470 struct ipa_jump_func *src;
2471 /* We must check the range because of calls with a variable number of
2472 arguments, and we cannot combine jump functions with operations. */
2473 if (dst->value.pass_through.operation == NOP_EXPR
2474 && (dst->value.pass_through.formal_id
2475 < ipa_get_cs_argument_count (top)))
2476 {
2477 int dst_fid = dst->value.pass_through.formal_id;
2478 src = ipa_get_ith_jump_func (top, dst_fid);
2479 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2480 struct ipa_polymorphic_call_context *src_ctx
2481 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2482
2483 if (src_ctx && !src_ctx->useless_p ())
2484 {
2485 struct ipa_polymorphic_call_context ctx = *src_ctx;
2486
2487 /* TODO: Make type preserved safe WRT contexts. */
2488 if (!ipa_get_jf_pass_through_type_preserved (dst))
2489 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2490 if (!ctx.useless_p ())
2491 {
2492 if (!dst_ctx)
2493 {
2494 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2495 count);
2496 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2497 }
2498 dst_ctx->combine_with (ctx);
2499 }
2500 }
2501 switch (src->type)
2502 {
2503 case IPA_JF_UNKNOWN:
2504 dst->type = IPA_JF_UNKNOWN;
2505 break;
2506 case IPA_JF_CONST:
2507 ipa_set_jf_cst_copy (dst, src);
2508 break;
2509
2510 case IPA_JF_PASS_THROUGH:
2511 {
2512 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2513 enum tree_code operation;
2514 operation = ipa_get_jf_pass_through_operation (src);
2515
2516 if (operation == NOP_EXPR)
2517 {
2518 bool agg_p;
2519 agg_p = dst_agg_p
2520 && ipa_get_jf_pass_through_agg_preserved (src);
2521 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2522 }
2523 else
2524 {
2525 tree operand = ipa_get_jf_pass_through_operand (src);
2526 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2527 operation);
2528 }
2529 break;
2530 }
2531 case IPA_JF_ANCESTOR:
2532 {
2533 bool agg_p;
2534 agg_p = dst_agg_p
2535 && ipa_get_jf_ancestor_agg_preserved (src);
2536 ipa_set_ancestor_jf (dst,
2537 ipa_get_jf_ancestor_offset (src),
2538 ipa_get_jf_ancestor_formal_id (src),
2539 agg_p);
2540 break;
2541 }
2542 default:
2543 gcc_unreachable ();
2544 }
2545
2546 if (src->agg.items
2547 && (dst_agg_p || !src->agg.by_ref))
2548 {
2549 /* Currently we do not produce clobber aggregate jump
2550 functions, replace with merging when we do. */
2551 gcc_assert (!dst->agg.items);
2552
2553 dst->agg.by_ref = src->agg.by_ref;
2554 dst->agg.items = vec_safe_copy (src->agg.items);
2555 }
2556 }
2557 else
2558 dst->type = IPA_JF_UNKNOWN;
2559 }
2560 }
2561 }
2562
2563 /* If TARGET is an addr_expr of a function declaration, make it the
2564 (speculative, if SPECULATIVE is set) destination of an indirect edge IE
2565 and return the edge. Otherwise, return NULL. */
2566
2567 struct cgraph_edge *
2568 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2569 bool speculative)
2570 {
2571 struct cgraph_node *callee;
2572 struct inline_edge_summary *es = inline_edge_summary (ie);
2573 bool unreachable = false;
2574
2575 if (TREE_CODE (target) == ADDR_EXPR)
2576 target = TREE_OPERAND (target, 0);
2577 if (TREE_CODE (target) != FUNCTION_DECL)
2578 {
2579 target = canonicalize_constructor_val (target, NULL);
2580 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2581 {
2582 if (ie->indirect_info->member_ptr)
2583 /* Member pointer call that goes through a VMT lookup. */
2584 return NULL;
2585
2586 if (dump_enabled_p ())
2587 {
2588 location_t loc = gimple_location_safe (ie->call_stmt);
2589 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2590 "discovered direct call to non-function in %s/%i, "
2591 "making it __builtin_unreachable\n",
2592 ie->caller->name (), ie->caller->order);
2593 }
2594
2595 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2596 callee = cgraph_node::get_create (target);
2597 unreachable = true;
2598 }
2599 else
2600 callee = cgraph_node::get (target);
2601 }
2602 else
2603 callee = cgraph_node::get (target);
2604
2605 /* Because may-edges are not explicitly represented and the vtable may be
2606 external, we may create the first reference to the object in the unit. */
2607 if (!callee || callee->global.inlined_to)
2608 {
2609
2610 /* We had better ensure we can refer to it.
2611 In the case of static functions we are out of luck, since we have
2612 already removed the body. In the case of public functions we may or
2613 may not introduce the reference. */
2614 if (!canonicalize_constructor_val (target, NULL)
2615 || !TREE_PUBLIC (target))
2616 {
2617 if (dump_file)
2618 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2619 "(%s/%i -> %s/%i) but cannot refer to it. Giving up.\n",
2620 xstrdup (ie->caller->name ()),
2621 ie->caller->order,
2622 xstrdup (ie->callee->name ()),
2623 ie->callee->order);
2624 return NULL;
2625 }
2626 callee = cgraph_node::get_create (target);
2627 }
2628
2629 /* If the edge is already speculative, check that the new target agrees. */
2630 if (speculative && ie->speculative)
2631 {
2632 struct cgraph_edge *e2;
2633 struct ipa_ref *ref;
2634 ie->speculative_call_info (e2, ie, ref);
2635 if (e2->callee->ultimate_alias_target ()
2636 != callee->ultimate_alias_target ())
2637 {
2638 if (dump_file)
2639 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2640 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2641 xstrdup (ie->caller->name ()),
2642 ie->caller->order,
2643 xstrdup (callee->name ()),
2644 callee->order,
2645 xstrdup (e2->callee->name ()),
2646 e2->callee->order);
2647 }
2648 else
2649 {
2650 if (dump_file)
2651 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2652 "(%s/%i -> %s/%i); this agrees with the previous speculation.\n",
2653 xstrdup (ie->caller->name ()),
2654 ie->caller->order,
2655 xstrdup (callee->name ()),
2656 callee->order);
2657 }
2658 return NULL;
2659 }
2660
2661 if (!dbg_cnt (devirt))
2662 return NULL;
2663
2664 ipa_check_create_node_params ();
2665
2666 /* We cannot make edges to inline clones. It is a bug if someone removed
2667 the cgraph node too early. */
2668 gcc_assert (!callee->global.inlined_to);
2669
2670 if (dump_file && !unreachable)
2671 {
2672 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2673 "(%s/%i -> %s/%i), for stmt ",
2674 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2675 speculative ? "speculative" : "known",
2676 xstrdup (ie->caller->name ()),
2677 ie->caller->order,
2678 xstrdup (callee->name ()),
2679 callee->order);
2680 if (ie->call_stmt)
2681 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2682 else
2683 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2684 }
2685 if (dump_enabled_p ())
2686 {
2687 location_t loc = gimple_location_safe (ie->call_stmt);
2688
2689 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2690 "converting indirect call in %s to direct call to %s\n",
2691 ie->caller->name (), callee->name ());
2692 }
2693 if (!speculative)
2694 ie = ie->make_direct (callee);
2695 else
2696 {
2697 if (!callee->can_be_discarded_p ())
2698 {
2699 cgraph_node *alias;
2700 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2701 if (alias)
2702 callee = alias;
2703 }
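/* Scale the profile: the speculative direct edge is assumed to be taken
8/10 of the time. The ratio is a hardcoded heuristic of this code, not
derived from actual profile data. */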
2704 ie = ie->make_speculative
2705 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2706 }
2707 es = inline_edge_summary (ie);
2708 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2709 - eni_size_weights.call_cost);
2710 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2711 - eni_time_weights.call_cost);
2712
2713 return ie;
2714 }
2715
2716 /* Retrieve a value from the aggregate jump function AGG for the given OFFSET
2717 or return NULL if there is none. BY_REF specifies whether the value has to
2718 be passed by reference or by value. */
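/* E.g. (made-up contents): if AGG describes a by-value aggregate with the
items {offset 0: 7, offset 32: &f}, a query with OFFSET == 32 and BY_REF ==
false returns &f, whereas a BY_REF mismatch or an offset with no matching
item returns NULL. */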
2719
2720 tree
2721 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2722 HOST_WIDE_INT offset, bool by_ref)
2723 {
2724 struct ipa_agg_jf_item *item;
2725 int i;
2726
2727 if (by_ref != agg->by_ref)
2728 return NULL;
2729
2730 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2731 if (item->offset == offset)
2732 {
2733 /* Currently we do not have clobber values, return NULL for them once
2734 we do. */
2735 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2736 return item->value;
2737 }
2738 return NULL;
2739 }
2740
2741 /* Remove a reference to SYMBOL from the list of references of a node given by
2742 reference description RDESC. Return true if the reference has been
2743 successfully found and removed. */
2744
2745 static bool
2746 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2747 {
2748 struct ipa_ref *to_del;
2749 struct cgraph_edge *origin;
2750
2751 origin = rdesc->cs;
2752 if (!origin)
2753 return false;
2754 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2755 origin->lto_stmt_uid);
2756 if (!to_del)
2757 return false;
2758
2759 to_del->remove_reference ();
2760 if (dump_file)
2761 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2762 xstrdup (origin->caller->name ()),
2763 origin->caller->order, xstrdup (symbol->name ()));
2764 return true;
2765 }
2766
2767 /* If JFUNC has a reference description with refcount different from
2768 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2769 NULL. JFUNC must be a constant jump function. */
2770
2771 static struct ipa_cst_ref_desc *
2772 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2773 {
2774 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2775 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2776 return rdesc;
2777 else
2778 return NULL;
2779 }
2780
2781 /* If the value of constant jump function JFUNC is an address of a function
2782 declaration, return the associated call graph node. Otherwise return
2783 NULL. */
2784
2785 static cgraph_node *
2786 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2787 {
2788 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2789 tree cst = ipa_get_jf_constant (jfunc);
2790 if (TREE_CODE (cst) != ADDR_EXPR
2791 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2792 return NULL;
2793
2794 return cgraph_node::get (TREE_OPERAND (cst, 0));
2795 }
2796
2797
2798 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2799 refcount and, if it hits zero, remove the reference to the described symbol
2800 from the caller of the edge specified in the rdesc. Return false if either
2801 the symbol or the reference could not be found, otherwise return true. */
2802
2803 static bool
2804 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2805 {
2806 struct ipa_cst_ref_desc *rdesc;
2807 if (jfunc->type == IPA_JF_CONST
2808 && (rdesc = jfunc_rdesc_usable (jfunc))
2809 && --rdesc->refcount == 0)
2810 {
2811 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2812 if (!symbol)
2813 return false;
2814
2815 return remove_described_reference (symbol, rdesc);
2816 }
2817 return true;
2818 }
2819
2820 /* Try to find a destination for indirect edge IE that corresponds to a simple
2821 call or a call of a member function pointer and where the destination is a
2822 pointer formal parameter described by jump function JFUNC. If it can be
2823 determined, return the newly direct edge, otherwise return NULL.
2824 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2825
2826 static struct cgraph_edge *
2827 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2828 struct ipa_jump_func *jfunc,
2829 struct ipa_node_params *new_root_info)
2830 {
2831 struct cgraph_edge *cs;
2832 tree target;
2833 bool agg_contents = ie->indirect_info->agg_contents;
2834
2835 if (ie->indirect_info->agg_contents)
2836 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2837 ie->indirect_info->offset,
2838 ie->indirect_info->by_ref);
2839 else
2840 target = ipa_value_from_jfunc (new_root_info, jfunc);
2841 if (!target)
2842 return NULL;
2843 cs = ipa_make_edge_direct_to_target (ie, target);
2844
2845 if (cs && !agg_contents)
2846 {
2847 bool ok;
2848 gcc_checking_assert (cs->callee
2849 && (cs != ie
2850 || jfunc->type != IPA_JF_CONST
2851 || !cgraph_node_for_jfunc (jfunc)
2852 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2853 ok = try_decrement_rdesc_refcount (jfunc);
2854 gcc_checking_assert (ok);
2855 }
2856
2857 return cs;
2858 }
2859
2860 /* Return the target to be used in cases of impossible devirtualization. IE
2861 and target (the latter can be NULL) are dumped when dumping is enabled. */
2862
2863 tree
2864 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2865 {
2866 if (dump_file)
2867 {
2868 if (target)
2869 fprintf (dump_file,
2870 "Type inconsistent devirtualization: %s/%i->%s\n",
2871 ie->caller->name (), ie->caller->order,
2872 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2873 else
2874 fprintf (dump_file,
2875 "No devirtualization target in %s/%i\n",
2876 ie->caller->name (), ie->caller->order);
2877 }
2878 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2879 cgraph_node::get_create (new_target);
2880 return new_target;
2881 }
2882
2883 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2884 call based on a formal parameter which is described by jump function JFUNC
2885 and if it can be determined, make it direct and return the direct edge.
2886 Otherwise, return NULL. CTX describes the polymorphic context carried by
2887 the parameter the call is based on. */
2888
2889 static struct cgraph_edge *
2890 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2891 struct ipa_jump_func *jfunc,
2892 struct ipa_polymorphic_call_context ctx)
2893 {
2894 tree target = NULL;
2895 bool speculative = false;
2896
2897 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2898 return NULL;
2899
2900 gcc_assert (!ie->indirect_info->by_ref);
2901
2902 /* Try to do lookup via known virtual table pointer value. */
2903 if (!ie->indirect_info->vptr_changed
2904 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2905 {
2906 tree vtable;
2907 unsigned HOST_WIDE_INT offset;
2908 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2909 ie->indirect_info->offset,
2910 true);
2911 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2912 {
2913 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2914 vtable, offset);
2915 if (t)
2916 {
2917 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2918 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2919 || !possible_polymorphic_call_target_p
2920 (ie, cgraph_node::get (t)))
2921 {
2922 /* Do not speculate builtin_unreachable; it would be pointless. */
2923 if (!ie->indirect_info->vptr_changed)
2924 target = ipa_impossible_devirt_target (ie, target);
2925 }
2926 else
2927 {
2928 target = t;
2929 speculative = ie->indirect_info->vptr_changed;
2930 }
2931 }
2932 }
2933 }
2934
2935 ipa_polymorphic_call_context ie_context (ie);
2936 vec <cgraph_node *>targets;
2937 bool final;
2938
2939 ctx.offset_by (ie->indirect_info->offset);
2940 if (ie->indirect_info->vptr_changed)
2941 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2942 ie->indirect_info->otr_type);
2943 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2944 targets = possible_polymorphic_call_targets
2945 (ie->indirect_info->otr_type,
2946 ie->indirect_info->otr_token,
2947 ctx, &final);
2948 if (final && targets.length () <= 1)
2949 {
2950 if (targets.length () == 1)
2951 target = targets[0]->decl;
2952 else
2953 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2954 }
2955 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2956 && !ie->speculative && ie->maybe_hot_p ())
2957 {
2958 cgraph_node *n;
2959 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2960 ie->indirect_info->otr_token,
2961 ie->indirect_info->context);
2962 if (n)
2963 {
2964 target = n->decl;
2965 speculative = true;
2966 }
2967 }
2968
2969 if (target)
2970 {
2971 if (!possible_polymorphic_call_target_p
2972 (ie, cgraph_node::get_create (target)))
2973 {
2974 if (speculative)
2975 return NULL;
2976 target = ipa_impossible_devirt_target (ie, target);
2977 }
2978 return ipa_make_edge_direct_to_target (ie, target, speculative);
2979 }
2980 else
2981 return NULL;
2982 }
2983
2984 /* Update the param called notes associated with NODE when CS is being inlined,
2985 assuming NODE is (potentially indirectly) inlined into CS->callee.
2986 Moreover, if the callee is discovered to be constant, create a new cgraph
2987 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2988 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2989
2990 static bool
2991 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2992 struct cgraph_node *node,
2993 vec<cgraph_edge *> *new_edges)
2994 {
2995 struct ipa_edge_args *top;
2996 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2997 struct ipa_node_params *new_root_info;
2998 bool res = false;
2999
3000 ipa_check_create_edge_args ();
3001 top = IPA_EDGE_REF (cs);
3002 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3003 ? cs->caller->global.inlined_to
3004 : cs->caller);
3005
3006 for (ie = node->indirect_calls; ie; ie = next_ie)
3007 {
3008 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3009 struct ipa_jump_func *jfunc;
3010 int param_index;
3011
3012 next_ie = ie->next_callee;
3013
3014 if (ici->param_index == -1)
3015 continue;
3016
3017 /* We must check the range because of calls with a variable number of arguments: */
3018 if (ici->param_index >= ipa_get_cs_argument_count (top))
3019 {
3020 ici->param_index = -1;
3021 continue;
3022 }
3023
3024 param_index = ici->param_index;
3025 jfunc = ipa_get_ith_jump_func (top, param_index);
3026
3027 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3028 new_direct_edge = NULL;
3029 else if (ici->polymorphic)
3030 {
3031 ipa_polymorphic_call_context ctx;
3032 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3033 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3034 }
3035 else
3036 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3037 new_root_info);
3038 /* If speculation was removed, then we need to do nothing. */
3039 if (new_direct_edge && new_direct_edge != ie)
3040 {
3041 new_direct_edge->indirect_inlining_edge = 1;
3042 top = IPA_EDGE_REF (cs);
3043 res = true;
3044 }
3045 else if (new_direct_edge)
3046 {
3047 new_direct_edge->indirect_inlining_edge = 1;
3048 if (new_direct_edge->call_stmt)
3049 new_direct_edge->call_stmt_cannot_inline_p
3050 = !gimple_check_call_matching_types (
3051 new_direct_edge->call_stmt,
3052 new_direct_edge->callee->decl, false);
3053 if (new_edges)
3054 {
3055 new_edges->safe_push (new_direct_edge);
3056 res = true;
3057 }
3058 top = IPA_EDGE_REF (cs);
3059 }
3060 else if (jfunc->type == IPA_JF_PASS_THROUGH
3061 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3062 {
3063 if ((ici->agg_contents
3064 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3065 || (ici->polymorphic
3066 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3067 ici->param_index = -1;
3068 else
3069 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3070 }
3071 else if (jfunc->type == IPA_JF_ANCESTOR)
3072 {
3073 if ((ici->agg_contents
3074 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3075 || (ici->polymorphic
3076 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3077 ici->param_index = -1;
3078 else
3079 {
3080 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3081 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3082 }
3083 }
3084 else
3085 /* Either we can find a destination for this edge now or never. */
3086 ici->param_index = -1;
3087 }
3088
3089 return res;
3090 }
3091
3092 /* Recursively traverse subtree of NODE (including node) made of inlined
3093 cgraph_edges when CS has been inlined and invoke
3094 update_indirect_edges_after_inlining on all nodes and
3095 update_jump_functions_after_inlining on all non-inlined edges that lead out
3096 of this subtree. Newly discovered indirect edges will be added to
3097 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3098 created. */
3099
3100 static bool
3101 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3102 struct cgraph_node *node,
3103 vec<cgraph_edge *> *new_edges)
3104 {
3105 struct cgraph_edge *e;
3106 bool res;
3107
3108 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3109
3110 for (e = node->callees; e; e = e->next_callee)
3111 if (!e->inline_failed)
3112 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3113 else
3114 update_jump_functions_after_inlining (cs, e);
3115 for (e = node->indirect_calls; e; e = e->next_callee)
3116 update_jump_functions_after_inlining (cs, e);
3117
3118 return res;
3119 }
3120
3121 /* Combine two controlled uses counts as done during inlining. */
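/* The "- 1" accounts for the call argument use that disappears with the
inlined call itself. E.g. (made-up counts): C == 2 uses in the new root and
D == 3 uses in the inlined callee combine to 2 + 3 - 1 == 4, while any
IPA_UNDESCRIBED_USE input keeps the result undescribed. */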
3122
3123 static int
3124 combine_controlled_uses_counters (int c, int d)
3125 {
3126 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3127 return IPA_UNDESCRIBED_USE;
3128 else
3129 return c + d - 1;
3130 }
3131
3132 /* Propagate the number of controlled uses from CS->callee to the new root
3133 of the tree of inlined nodes. */
3134
3135 static void
3136 propagate_controlled_uses (struct cgraph_edge *cs)
3137 {
3138 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3139 struct cgraph_node *new_root = cs->caller->global.inlined_to
3140 ? cs->caller->global.inlined_to : cs->caller;
3141 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3142 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3143 int count, i;
3144
3145 count = MIN (ipa_get_cs_argument_count (args),
3146 ipa_get_param_count (old_root_info));
3147 for (i = 0; i < count; i++)
3148 {
3149 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3150 struct ipa_cst_ref_desc *rdesc;
3151
3152 if (jf->type == IPA_JF_PASS_THROUGH)
3153 {
3154 int src_idx, c, d;
3155 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3156 c = ipa_get_controlled_uses (new_root_info, src_idx);
3157 d = ipa_get_controlled_uses (old_root_info, i);
3158
3159 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3160 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3161 c = combine_controlled_uses_counters (c, d);
3162 ipa_set_controlled_uses (new_root_info, src_idx, c);
3163 if (c == 0 && new_root_info->ipcp_orig_node)
3164 {
3165 struct cgraph_node *n;
3166 struct ipa_ref *ref;
3167 tree t = new_root_info->known_csts[src_idx];
3168
3169 if (t && TREE_CODE (t) == ADDR_EXPR
3170 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3171 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3172 && (ref = new_root->find_reference (n, NULL, 0)))
3173 {
3174 if (dump_file)
3175 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3176 "reference from %s/%i to %s/%i.\n",
3177 xstrdup (new_root->name ()),
3178 new_root->order,
3179 xstrdup (n->name ()), n->order);
3180 ref->remove_reference ();
3181 }
3182 }
3183 }
3184 else if (jf->type == IPA_JF_CONST
3185 && (rdesc = jfunc_rdesc_usable (jf)))
3186 {
3187 int d = ipa_get_controlled_uses (old_root_info, i);
3188 int c = rdesc->refcount;
3189 rdesc->refcount = combine_controlled_uses_counters (c, d);
3190 if (rdesc->refcount == 0)
3191 {
3192 tree cst = ipa_get_jf_constant (jf);
3193 struct cgraph_node *n;
3194 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3195 && TREE_CODE (TREE_OPERAND (cst, 0))
3196 == FUNCTION_DECL);
3197 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3198 if (n)
3199 {
3200 struct cgraph_node *clone;
3201 bool ok;
3202 ok = remove_described_reference (n, rdesc);
3203 gcc_checking_assert (ok);
3204
3205 clone = cs->caller;
3206 while (clone->global.inlined_to
3207 && clone != rdesc->cs->caller
3208 && IPA_NODE_REF (clone)->ipcp_orig_node)
3209 {
3210 struct ipa_ref *ref;
3211 ref = clone->find_reference (n, NULL, 0);
3212 if (ref)
3213 {
3214 if (dump_file)
3215 fprintf (dump_file, "ipa-prop: Removing "
3216 "cloning-created reference "
3217 "from %s/%i to %s/%i.\n",
3218 xstrdup (clone->name ()),
3219 clone->order,
3220 xstrdup (n->name ()),
3221 n->order);
3222 ref->remove_reference ();
3223 }
3224 clone = clone->callers->caller;
3225 }
3226 }
3227 }
3228 }
3229 }
3230
3231 for (i = ipa_get_param_count (old_root_info);
3232 i < ipa_get_cs_argument_count (args);
3233 i++)
3234 {
3235 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3236
3237 if (jf->type == IPA_JF_CONST)
3238 {
3239 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3240 if (rdesc)
3241 rdesc->refcount = IPA_UNDESCRIBED_USE;
3242 }
3243 else if (jf->type == IPA_JF_PASS_THROUGH)
3244 ipa_set_controlled_uses (new_root_info,
3245 jf->value.pass_through.formal_id,
3246 IPA_UNDESCRIBED_USE);
3247 }
3248 }
3249
3250 /* Update jump functions and call note functions on inlining the call site CS.
3251 CS is expected to lead to a node already cloned by
3252 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3253 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3254 created. */
3255
3256 bool
3257 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3258 vec<cgraph_edge *> *new_edges)
3259 {
3260 bool changed;
3261 /* Do nothing if the preparation phase has not been carried out yet
3262 (i.e. during early inlining). */
3263 if (!ipa_node_params_vector.exists ())
3264 return false;
3265 gcc_assert (ipa_edge_args_vector);
3266
3267 propagate_controlled_uses (cs);
3268 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3269
3270 return changed;
3271 }
3272
3273 /* Frees all dynamically allocated structures that the argument info points
3274 to. */
3275
3276 void
3277 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3278 {
3279 vec_free (args->jump_functions);
3280 memset (args, 0, sizeof (*args));
3281 }
3282
3283 /* Free all ipa_edge_args structures. */
3284
3285 void
3286 ipa_free_all_edge_args (void)
3287 {
3288 int i;
3289 struct ipa_edge_args *args;
3290
3291 if (!ipa_edge_args_vector)
3292 return;
3293
3294 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3295 ipa_free_edge_args_substructures (args);
3296
3297 vec_free (ipa_edge_args_vector);
3298 }
3299
3300 /* Frees all dynamically allocated structures that the param info points
3301 to. */
3302
3303 void
3304 ipa_free_node_params_substructures (struct ipa_node_params *info)
3305 {
3306 info->descriptors.release ();
3307 free (info->lattices);
3308 /* Lattice values and their sources are deallocated with their allocation
3309 pool. */
3310 info->known_csts.release ();
3311 info->known_contexts.release ();
3312 memset (info, 0, sizeof (*info));
3313 }
3314
3315 /* Free all ipa_node_params structures. */
3316
3317 void
3318 ipa_free_all_node_params (void)
3319 {
3320 int i;
3321 struct ipa_node_params *info;
3322
3323 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3324 ipa_free_node_params_substructures (info);
3325
3326 ipa_node_params_vector.release ();
3327 }
3328
3329 /* Set the aggregate replacements of NODE to be AGGVALS. */
3330
3331 void
3332 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3333 struct ipa_agg_replacement_value *aggvals)
3334 {
3335 if (vec_safe_length (ipa_node_agg_replacements)
3336 <= (unsigned) symtab->cgraph_max_uid)
3337 vec_safe_grow_cleared (ipa_node_agg_replacements,
3338 symtab->cgraph_max_uid + 1);
3339
3340 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3341 }
3342
3343 /* Hook that is called by cgraph.c when an edge is removed. */
3344
3345 static void
3346 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3347 {
3348 struct ipa_edge_args *args;
3349
3350 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3351 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3352 return;
3353
3354 args = IPA_EDGE_REF (cs);
3355 if (args->jump_functions)
3356 {
3357 struct ipa_jump_func *jf;
3358 int i;
3359 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3360 {
3361 struct ipa_cst_ref_desc *rdesc;
3362 try_decrement_rdesc_refcount (jf);
3363 if (jf->type == IPA_JF_CONST
3364 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3365 && rdesc->cs == cs)
3366 rdesc->cs = NULL;
3367 }
3368 }
3369
3370 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3371 }
3372
3373 /* Hook that is called by cgraph.c when a node is removed. */
3374
3375 static void
3376 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3377 {
3378 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3379 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3380 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3381 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3382 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3383 }
3384
3385 /* Hook that is called by cgraph.c when an edge is duplicated. */
3386
3387 static void
3388 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3389 __attribute__((unused)) void *data)
3390 {
3391 struct ipa_edge_args *old_args, *new_args;
3392 unsigned int i;
3393
3394 ipa_check_create_edge_args ();
3395
3396 old_args = IPA_EDGE_REF (src);
3397 new_args = IPA_EDGE_REF (dst);
3398
3399 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3400 if (old_args->polymorphic_call_contexts)
3401 new_args->polymorphic_call_contexts
3402 = vec_safe_copy (old_args->polymorphic_call_contexts);
3403
3404 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3405 {
3406 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3407 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3408
3409 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3410
3411 if (src_jf->type == IPA_JF_CONST)
3412 {
3413 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3414
3415 if (!src_rdesc)
3416 dst_jf->value.constant.rdesc = NULL;
3417 else if (src->caller == dst->caller)
3418 {
3419 struct ipa_ref *ref;
3420 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3421 gcc_checking_assert (n);
3422 ref = src->caller->find_reference (n, src->call_stmt,
3423 src->lto_stmt_uid);
3424 gcc_checking_assert (ref);
3425 dst->caller->clone_reference (ref, ref->stmt);
3426
3427 gcc_checking_assert (ipa_refdesc_pool);
3428 struct ipa_cst_ref_desc *dst_rdesc
3429 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3430 dst_rdesc->cs = dst;
3431 dst_rdesc->refcount = src_rdesc->refcount;
3432 dst_rdesc->next_duplicate = NULL;
3433 dst_jf->value.constant.rdesc = dst_rdesc;
3434 }
3435 else if (src_rdesc->cs == src)
3436 {
3437 struct ipa_cst_ref_desc *dst_rdesc;
3438 gcc_checking_assert (ipa_refdesc_pool);
3439 dst_rdesc
3440 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3441 dst_rdesc->cs = dst;
3442 dst_rdesc->refcount = src_rdesc->refcount;
3443 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3444 src_rdesc->next_duplicate = dst_rdesc;
3445 dst_jf->value.constant.rdesc = dst_rdesc;
3446 }
3447 else
3448 {
3449 struct ipa_cst_ref_desc *dst_rdesc;
3450 /* This can happen during inlining, when a JFUNC can refer to a
3451 reference taken in a function up in the tree of inline clones.
3452 We need to find the duplicate that refers to our tree of
3453 inline clones. */
3454
3455 gcc_assert (dst->caller->global.inlined_to);
3456 for (dst_rdesc = src_rdesc->next_duplicate;
3457 dst_rdesc;
3458 dst_rdesc = dst_rdesc->next_duplicate)
3459 {
3460 struct cgraph_node *top;
3461 top = dst_rdesc->cs->caller->global.inlined_to
3462 ? dst_rdesc->cs->caller->global.inlined_to
3463 : dst_rdesc->cs->caller;
3464 if (dst->caller->global.inlined_to == top)
3465 break;
3466 }
3467 gcc_assert (dst_rdesc);
3468 dst_jf->value.constant.rdesc = dst_rdesc;
3469 }
3470 }
3471 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3472 && src->caller == dst->caller)
3473 {
3474 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3475 ? dst->caller->global.inlined_to : dst->caller;
3476 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3477 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3478
3479 int c = ipa_get_controlled_uses (root_info, idx);
3480 if (c != IPA_UNDESCRIBED_USE)
3481 {
3482 c++;
3483 ipa_set_controlled_uses (root_info, idx, c);
3484 }
3485 }
3486 }
3487 }
3488
3489 /* Hook that is called by cgraph.c when a node is duplicated. */
3490
3491 static void
3492 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3493 ATTRIBUTE_UNUSED void *data)
3494 {
3495 struct ipa_node_params *old_info, *new_info;
3496 struct ipa_agg_replacement_value *old_av, *new_av;
3497
3498 ipa_check_create_node_params ();
3499 old_info = IPA_NODE_REF (src);
3500 new_info = IPA_NODE_REF (dst);
3501
3502 new_info->descriptors = old_info->descriptors.copy ();
3503 new_info->lattices = NULL;
3504 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3505
3506 new_info->analysis_done = old_info->analysis_done;
3507 new_info->node_enqueued = old_info->node_enqueued;
3508
3509 old_av = ipa_get_agg_replacements_for_node (src);
3510 if (!old_av)
3511 return;
3512
3513 new_av = NULL;
3514 while (old_av)
3515 {
3516 struct ipa_agg_replacement_value *v;
3517
3518 v = ggc_alloc<ipa_agg_replacement_value> ();
3519 memcpy (v, old_av, sizeof (*v));
3520 v->next = new_av;
3521 new_av = v;
3522 old_av = old_av->next;
3523 }
3524 ipa_set_node_agg_value_chain (dst, new_av);
3525 }
3526
3527
3528 /* Analyze a function newly added to the callgraph. */
3529
3530 static void
3531 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3532 {
3533 if (node->has_gimple_body_p ())
3534 ipa_analyze_node (node);
3535 }
3536
3537 /* Register our cgraph hooks if they are not already there. */
3538
3539 void
3540 ipa_register_cgraph_hooks (void)
3541 {
3542 if (!edge_removal_hook_holder)
3543 edge_removal_hook_holder =
3544 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3545 if (!node_removal_hook_holder)
3546 node_removal_hook_holder =
3547 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3548 if (!edge_duplication_hook_holder)
3549 edge_duplication_hook_holder =
3550 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3551 if (!node_duplication_hook_holder)
3552 node_duplication_hook_holder =
3553 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3554 function_insertion_hook_holder =
3555 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3556 }
3557
3558 /* Unregister our cgraph hooks. */
3559
3560 static void
3561 ipa_unregister_cgraph_hooks (void)
3562 {
3563 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3564 edge_removal_hook_holder = NULL;
3565 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3566 node_removal_hook_holder = NULL;
3567 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3568 edge_duplication_hook_holder = NULL;
3569 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3570 node_duplication_hook_holder = NULL;
3571 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3572 function_insertion_hook_holder = NULL;
3573 }
3574
3575 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3576 longer needed after ipa-cp. */
3577
3578 void
3579 ipa_free_all_structures_after_ipa_cp (void)
3580 {
3581 if (!optimize && !in_lto_p)
3582 {
3583 ipa_free_all_edge_args ();
3584 ipa_free_all_node_params ();
3585 free_alloc_pool (ipcp_sources_pool);
3586 free_alloc_pool (ipcp_cst_values_pool);
3587 free_alloc_pool (ipcp_poly_ctx_values_pool);
3588 free_alloc_pool (ipcp_agg_lattice_pool);
3589 ipa_unregister_cgraph_hooks ();
3590 if (ipa_refdesc_pool)
3591 free_alloc_pool (ipa_refdesc_pool);
3592 }
3593 }
3594
3595 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3596 longer needed after indirect inlining. */
3597
3598 void
3599 ipa_free_all_structures_after_iinln (void)
3600 {
3601 ipa_free_all_edge_args ();
3602 ipa_free_all_node_params ();
3603 ipa_unregister_cgraph_hooks ();
3604 if (ipcp_sources_pool)
3605 free_alloc_pool (ipcp_sources_pool);
3606 if (ipcp_cst_values_pool)
3607 free_alloc_pool (ipcp_cst_values_pool);
3608 if (ipcp_poly_ctx_values_pool)
3609 free_alloc_pool (ipcp_poly_ctx_values_pool);
3610 if (ipcp_agg_lattice_pool)
3611 free_alloc_pool (ipcp_agg_lattice_pool);
3612 if (ipa_refdesc_pool)
3613 free_alloc_pool (ipa_refdesc_pool);
3614 }
3615
3616 /* Print the parameter descriptors stored in the ipa_node_params structure
3617 of function NODE to F. */
3618
3619 void
3620 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3621 {
3622 int i, count;
3623 struct ipa_node_params *info;
3624
3625 if (!node->definition)
3626 return;
3627 info = IPA_NODE_REF (node);
3628 fprintf (f, " function %s/%i parameter descriptors:\n",
3629 node->name (), node->order);
3630 count = ipa_get_param_count (info);
3631 for (i = 0; i < count; i++)
3632 {
3633 int c;
3634
3635 fprintf (f, " ");
3636 ipa_dump_param (f, info, i);
3637 if (ipa_is_param_used (info, i))
3638 fprintf (f, " used");
3639 c = ipa_get_controlled_uses (info, i);
3640 if (c == IPA_UNDESCRIBED_USE)
3641 fprintf (f, " undescribed_use");
3642 else
3643 fprintf (f, " controlled_uses=%i", c);
3644 fprintf (f, "\n");
3645 }
3646 }
3647
3648 /* Print the parameter descriptors of all functions in the
3649 callgraph to F. */
3650
3651 void
3652 ipa_print_all_params (FILE * f)
3653 {
3654 struct cgraph_node *node;
3655
3656 fprintf (f, "\nFunction parameters:\n");
3657 FOR_EACH_FUNCTION (node)
3658 ipa_print_node_params (f, node);
3659 }
3660
3661 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3662
3663 vec<tree>
3664 ipa_get_vector_of_formal_parms (tree fndecl)
3665 {
3666 vec<tree> args;
3667 int count;
3668 tree parm;
3669
3670 gcc_assert (!flag_wpa);
3671 count = count_formal_params (fndecl);
3672 args.create (count);
3673 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3674 args.quick_push (parm);
3675
3676 return args;
3677 }
3678
3679 /* Return a heap-allocated vector containing the types of the formal
3680 parameters of function type FNTYPE. */
3681
3682 vec<tree>
3683 ipa_get_vector_of_formal_parm_types (tree fntype)
3684 {
3685 vec<tree> types;
3686 int count = 0;
3687 tree t;
3688
3689 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3690 count++;
3691
3692 types.create (count);
3693 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3694 types.quick_push (TREE_VALUE (t));
3695
3696 return types;
3697 }
3698
3699 /* Modify the function declaration FNDECL and its type according to the plan in
3700 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3701 to reflect the actual parameters being modified which are determined by the
3702 base_index field. */
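/* For illustration (hypothetical example): given 'int foo (int a, int b)'
and an adjustment vector containing a single IPA_PARM_OP_COPY entry whose
base_index is 1, the declaration is rewritten to 'int foo (int b)'. */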
3703
3704 void
3705 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3706 {
3707 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3708 tree orig_type = TREE_TYPE (fndecl);
3709 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3710
3711 /* The following test is an ugly hack: some functions simply don't have any
3712 arguments in their type. This is probably a bug, but we handle it anyway. */
3713 bool care_for_types = (old_arg_types != NULL_TREE);
3714 bool last_parm_void;
3715 vec<tree> otypes;
3716 if (care_for_types)
3717 {
3718 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3719 == void_type_node);
3720 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3721 if (last_parm_void)
3722 gcc_assert (oparms.length () + 1 == otypes.length ());
3723 else
3724 gcc_assert (oparms.length () == otypes.length ());
3725 }
3726 else
3727 {
3728 last_parm_void = false;
3729 otypes.create (0);
3730 }
3731
3732 int len = adjustments.length ();
3733 tree *link = &DECL_ARGUMENTS (fndecl);
3734 tree new_arg_types = NULL;
3735 for (int i = 0; i < len; i++)
3736 {
3737 struct ipa_parm_adjustment *adj;
3738 gcc_assert (link);
3739
3740 adj = &adjustments[i];
3741 tree parm;
3742 if (adj->op == IPA_PARM_OP_NEW)
3743 parm = NULL;
3744 else
3745 parm = oparms[adj->base_index];
3746 adj->base = parm;
3747
3748 if (adj->op == IPA_PARM_OP_COPY)
3749 {
3750 if (care_for_types)
3751 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3752 new_arg_types);
3753 *link = parm;
3754 link = &DECL_CHAIN (parm);
3755 }
3756 else if (adj->op != IPA_PARM_OP_REMOVE)
3757 {
3758 tree new_parm;
3759 tree ptype;
3760
3761 if (adj->by_ref)
3762 ptype = build_pointer_type (adj->type);
3763 else
3764 {
3765 ptype = adj->type;
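/* For scalars that can live in registers, make sure the new parameter's
type is at least as aligned as its machine mode requires. */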
3766 if (is_gimple_reg_type (ptype))
3767 {
3768 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3769 if (TYPE_ALIGN (ptype) < malign)
3770 ptype = build_aligned_type (ptype, malign);
3771 }
3772 }
3773
3774 if (care_for_types)
3775 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3776
3777 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3778 ptype);
3779 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3780 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3781 DECL_ARTIFICIAL (new_parm) = 1;
3782 DECL_ARG_TYPE (new_parm) = ptype;
3783 DECL_CONTEXT (new_parm) = fndecl;
3784 TREE_USED (new_parm) = 1;
3785 DECL_IGNORED_P (new_parm) = 1;
3786 layout_decl (new_parm, 0);
3787
3788 if (adj->op == IPA_PARM_OP_NEW)
3789 adj->base = NULL;
3790 else
3791 adj->base = parm;
3792 adj->new_decl = new_parm;
3793
3794 *link = new_parm;
3795 link = &DECL_CHAIN (new_parm);
3796 }
3797 }
3798
3799 *link = NULL_TREE;
3800
3801 tree new_reversed = NULL;
3802 if (care_for_types)
3803 {
3804 new_reversed = nreverse (new_arg_types);
3805 if (last_parm_void)
3806 {
3807 if (new_reversed)
3808 TREE_CHAIN (new_arg_types) = void_list_node;
3809 else
3810 new_reversed = void_list_node;
3811 }
3812 }
3813
3814 /* Use build_distinct_type_copy to preserve as much as possible from the
3815 original type (debug info, attribute lists etc.).
3816 The exception is METHOD_TYPEs, which must have a THIS argument;
3817 when we are asked to remove it, we need to build a new FUNCTION_TYPE
3818 instead. */
3819 tree new_type = NULL;
3820 if (TREE_CODE (orig_type) != METHOD_TYPE
3821 || (adjustments[0].op == IPA_PARM_OP_COPY
3822 && adjustments[0].base_index == 0))
3823 {
3824 new_type = build_distinct_type_copy (orig_type);
3825 TYPE_ARG_TYPES (new_type) = new_reversed;
3826 }
3827 else
3828 {
3829 new_type
3830 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3831 new_reversed));
3832 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3833 DECL_VINDEX (fndecl) = NULL_TREE;
3834 }
3835
3836 /* When the signature changes, we need to clear builtin info. */
3837 if (DECL_BUILT_IN (fndecl))
3838 {
3839 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3840 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3841 }
3842
3843 TREE_TYPE (fndecl) = new_type;
3844 DECL_VIRTUAL_P (fndecl) = 0;
3845 DECL_LANG_SPECIFIC (fndecl) = NULL;
3846 otypes.release ();
3847 oparms.release ();
3848 }
3849
3850 /* Modify actual arguments of the function call statement STMT as indicated
3851 in ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
3852 otherwise it must contain the corresponding call graph edge. */
3853
3854 void
3855 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3856 ipa_parm_adjustment_vec adjustments)
3857 {
3858 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3859 vec<tree> vargs;
3860 vec<tree, va_gc> **debug_args = NULL;
3861 gimple new_stmt;
3862 gimple_stmt_iterator gsi, prev_gsi;
3863 tree callee_decl;
3864 int i, len;
3865
3866 len = adjustments.length ();
3867 vargs.create (len);
3868 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3869 current_node->remove_stmt_references (stmt);
3870
3871 gsi = gsi_for_stmt (stmt);
3872 prev_gsi = gsi;
3873 gsi_prev (&prev_gsi);
3874 for (i = 0; i < len; i++)
3875 {
3876 struct ipa_parm_adjustment *adj;
3877
3878 adj = &adjustments[i];
3879
3880 if (adj->op == IPA_PARM_OP_COPY)
3881 {
3882 tree arg = gimple_call_arg (stmt, adj->base_index);
3883
3884 vargs.quick_push (arg);
3885 }
3886 else if (adj->op != IPA_PARM_OP_REMOVE)
3887 {
3888 tree expr, base, off;
3889 location_t loc;
3890 unsigned int deref_align = 0;
3891 bool deref_base = false;
3892
3893 /* Because we create a new parameter out of the value of the old one, we
3894 can perform the following kinds of transformations:
3895
3896 - A scalar passed by reference is converted to a scalar passed by
3897 value. (adj->by_ref is false and the type of the original
3898 actual argument is a pointer to a scalar).
3899
3900 - A part of an aggregate is passed instead of the whole aggregate.
3901 The part can be passed either by value or by reference, this is
3902 determined by value of adj->by_ref. Moreover, the code below
3903 handles both situations when the original aggregate is passed by
3904 value (its type is not a pointer) and when it is passed by
3905 reference (it is a pointer to an aggregate).
3906
3907 When the new argument is passed by reference (adj->by_ref is true)
3908 it must be a part of an aggregate and therefore we form it by
3909 simply taking the address of a reference inside the original
3910 aggregate. */
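/* For illustration (hypothetical example): for a call 'foo (&s)' where the
callee only reads the scalar field 's.f' at byte offset 4, the new
actual argument can be formed as 'MEM_REF[&s + 4]', effectively turning
the call into 'foo (s.f)'. */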
3911
3912 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3913 base = gimple_call_arg (stmt, adj->base_index);
3914 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3915 : EXPR_LOCATION (base);
3916
3917 if (TREE_CODE (base) != ADDR_EXPR
3918 && POINTER_TYPE_P (TREE_TYPE (base)))
3919 off = build_int_cst (adj->alias_ptr_type,
3920 adj->offset / BITS_PER_UNIT);
3921 else
3922 {
3923 HOST_WIDE_INT base_offset;
3924 tree prev_base;
3925 bool addrof;
3926
3927 if (TREE_CODE (base) == ADDR_EXPR)
3928 {
3929 base = TREE_OPERAND (base, 0);
3930 addrof = true;
3931 }
3932 else
3933 addrof = false;
3934 prev_base = base;
3935 base = get_addr_base_and_unit_offset (base, &base_offset);
3936 /* Aggregate arguments can have non-invariant addresses. */
3937 if (!base)
3938 {
3939 base = build_fold_addr_expr (prev_base);
3940 off = build_int_cst (adj->alias_ptr_type,
3941 adj->offset / BITS_PER_UNIT);
3942 }
3943 else if (TREE_CODE (base) == MEM_REF)
3944 {
3945 if (!addrof)
3946 {
3947 deref_base = true;
3948 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3949 }
3950 off = build_int_cst (adj->alias_ptr_type,
3951 base_offset
3952 + adj->offset / BITS_PER_UNIT);
3953 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3954 off);
3955 base = TREE_OPERAND (base, 0);
3956 }
3957 else
3958 {
3959 off = build_int_cst (adj->alias_ptr_type,
3960 base_offset
3961 + adj->offset / BITS_PER_UNIT);
3962 base = build_fold_addr_expr (base);
3963 }
3964 }
3965
3966 if (!adj->by_ref)
3967 {
3968 tree type = adj->type;
3969 unsigned int align;
3970 unsigned HOST_WIDE_INT misalign;
3971
3972 if (deref_base)
3973 {
3974 align = deref_align;
3975 misalign = 0;
3976 }
3977 else
3978 {
3979 get_pointer_alignment_1 (base, &align, &misalign);
3980 if (TYPE_ALIGN (type) > align)
3981 align = TYPE_ALIGN (type);
3982 }
3983 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3984 * BITS_PER_UNIT);
3985 misalign = misalign & (align - 1);
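/* A non-zero misalignment means the strongest alignment we can
guarantee is its lowest set bit. */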
3986 if (misalign != 0)
3987 align = (misalign & -misalign);
3988 if (align < TYPE_ALIGN (type))
3989 type = build_aligned_type (type, align);
3990 base = force_gimple_operand_gsi (&gsi, base,
3991 true, NULL, true, GSI_SAME_STMT);
3992 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3993 /* If EXPR is not a valid gimple call argument, emit
3994 a load into a temporary. */
3995 if (is_gimple_reg_type (TREE_TYPE (expr)))
3996 {
3997 gimple tem = gimple_build_assign (NULL_TREE, expr);
3998 if (gimple_in_ssa_p (cfun))
3999 {
4000 gimple_set_vuse (tem, gimple_vuse (stmt));
4001 expr = make_ssa_name (TREE_TYPE (expr), tem);
4002 }
4003 else
4004 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4005 gimple_assign_set_lhs (tem, expr);
4006 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4007 }
4008 }
4009 else
4010 {
4011 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4012 expr = build_fold_addr_expr (expr);
4013 expr = force_gimple_operand_gsi (&gsi, expr,
4014 true, NULL, true, GSI_SAME_STMT);
4015 }
4016 vargs.quick_push (expr);
4017 }
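/* For arguments that are not passed through unchanged, bind their value
to a DEBUG_EXPR_DECL in the callee's debug-argument list, so that debug
info can presumably still describe the original parameter. */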
4018 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4019 {
4020 unsigned int ix;
4021 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4022 gimple def_temp;
4023
4024 arg = gimple_call_arg (stmt, adj->base_index);
4025 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4026 {
4027 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4028 continue;
4029 arg = fold_convert_loc (gimple_location (stmt),
4030 TREE_TYPE (origin), arg);
4031 }
4032 if (debug_args == NULL)
4033 debug_args = decl_debug_args_insert (callee_decl);
4034 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4035 if (ddecl == origin)
4036 {
4037 ddecl = (**debug_args)[ix + 1];
4038 break;
4039 }
4040 if (ddecl == NULL)
4041 {
4042 ddecl = make_node (DEBUG_EXPR_DECL);
4043 DECL_ARTIFICIAL (ddecl) = 1;
4044 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4045 DECL_MODE (ddecl) = DECL_MODE (origin);
4046
4047 vec_safe_push (*debug_args, origin);
4048 vec_safe_push (*debug_args, ddecl);
4049 }
4050 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4051 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4052 }
4053 }
4054
4055 if (dump_file && (dump_flags & TDF_DETAILS))
4056 {
4057 fprintf (dump_file, "replacing stmt:");
4058 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4059 }
4060
4061 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4062 vargs.release ();
4063 if (gimple_call_lhs (stmt))
4064 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4065
4066 gimple_set_block (new_stmt, gimple_block (stmt));
4067 if (gimple_has_location (stmt))
4068 gimple_set_location (new_stmt, gimple_location (stmt));
4069 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4070 gimple_call_copy_flags (new_stmt, stmt);
4071 if (gimple_in_ssa_p (cfun))
4072 {
4073 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4074 if (gimple_vdef (stmt))
4075 {
4076 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4077 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4078 }
4079 }
4080
4081 if (dump_file && (dump_flags & TDF_DETAILS))
4082 {
4083 fprintf (dump_file, "with stmt:");
4084 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4085 fprintf (dump_file, "\n");
4086 }
4087 gsi_replace (&gsi, new_stmt, true);
4088 if (cs)
4089 cs->set_call_stmt (new_stmt);
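/* Record references for the new call statement and for any statements
inserted before it, walking backwards until the statement that
originally preceded the call. */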
4090 do
4091 {
4092 current_node->record_stmt_references (gsi_stmt (gsi));
4093 gsi_prev (&gsi);
4094 }
4095 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4096 }
4097
4098 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4099 so. ADJUSTMENTS is the vector of adjustments to consult. CONVERT
4100 specifies whether the function should care about type incompatibility between
4101 the current and new expressions. If it is false, the function will leave
4102 incompatibility issues to the caller. Return true iff the expression
4103 was modified. */
4104
4105 bool
4106 ipa_modify_expr (tree *expr, bool convert,
4107 ipa_parm_adjustment_vec adjustments)
4108 {
4109 struct ipa_parm_adjustment *cand
4110 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4111 if (!cand)
4112 return false;
4113
4114 tree src;
4115 if (cand->by_ref)
4116 src = build_simple_mem_ref (cand->new_decl);
4117 else
4118 src = cand->new_decl;
4119
4120 if (dump_file && (dump_flags & TDF_DETAILS))
4121 {
4122 fprintf (dump_file, "About to replace expr ");
4123 print_generic_expr (dump_file, *expr, 0);
4124 fprintf (dump_file, " with ");
4125 print_generic_expr (dump_file, src, 0);
4126 fprintf (dump_file, "\n");
4127 }
4128
4129 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4130 {
4131 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4132 *expr = vce;
4133 }
4134 else
4135 *expr = src;
4136 return true;
4137 }
4138
4139 /* If T is an SSA_NAME, return NULL if it is not a default def or
4140 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4141 the base variable is always returned, regardless of whether it is a
4142 default def. Return T if it is not an SSA_NAME. */
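/* For illustration (hypothetical example): for 'int foo (int a)', the
default-definition SSA name a_1(D) yields the PARM_DECL of A, while a
non-default definition such as a_2 yields NULL_TREE unless
IGNORE_DEFAULT_DEF is true. */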
4143
4144 static tree
4145 get_ssa_base_param (tree t, bool ignore_default_def)
4146 {
4147 if (TREE_CODE (t) == SSA_NAME)
4148 {
4149 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4150 return SSA_NAME_VAR (t);
4151 else
4152 return NULL_TREE;
4153 }
4154 return t;
4155 }
4156
4157 /* Given an expression, return an adjustment entry specifying the
4158 transformation to be done on EXPR. If no suitable adjustment entry
4159 was found, returns NULL.
4160
4161 If IGNORE_DEFAULT_DEF is set, also consider SSA_NAMEs which are not a
4162 default def; otherwise bail out on them.
4163
4164 If CONVERT is non-NULL, this function will set *CONVERT if the
4165 expression provided is a component reference. ADJUSTMENTS is the
4166 adjustments vector. */
4167
4168 ipa_parm_adjustment *
4169 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4170 ipa_parm_adjustment_vec adjustments,
4171 bool ignore_default_def)
4172 {
4173 if (TREE_CODE (**expr) == BIT_FIELD_REF
4174 || TREE_CODE (**expr) == IMAGPART_EXPR
4175 || TREE_CODE (**expr) == REALPART_EXPR)
4176 {
4177 *expr = &TREE_OPERAND (**expr, 0);
4178 if (convert)
4179 *convert = true;
4180 }
4181
4182 HOST_WIDE_INT offset, size, max_size;
4183 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4184 if (!base || size == -1 || max_size == -1)
4185 return NULL;
4186
4187 if (TREE_CODE (base) == MEM_REF)
4188 {
4189 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4190 base = TREE_OPERAND (base, 0);
4191 }
4192
4193 base = get_ssa_base_param (base, ignore_default_def);
4194 if (!base || TREE_CODE (base) != PARM_DECL)
4195 return NULL;
4196
4197 struct ipa_parm_adjustment *cand = NULL;
4198 unsigned int len = adjustments.length ();
4199 for (unsigned i = 0; i < len; i++)
4200 {
4201 struct ipa_parm_adjustment *adj = &adjustments[i];
4202
4203 if (adj->base == base
4204 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4205 {
4206 cand = adj;
4207 break;
4208 }
4209 }
4210
4211 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4212 return NULL;
4213 return cand;
4214 }
4215
4216 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4217
4218 static bool
4219 index_in_adjustments_multiple_times_p (int base_index,
4220 ipa_parm_adjustment_vec adjustments)
4221 {
4222 int i, len = adjustments.length ();
4223 bool one = false;
4224
4225 for (i = 0; i < len; i++)
4226 {
4227 struct ipa_parm_adjustment *adj;
4228 adj = &adjustments[i];
4229
4230 if (adj->base_index == base_index)
4231 {
4232 if (one)
4233 return true;
4234 else
4235 one = true;
4236 }
4237 }
4238 return false;
4239 }
4240
4241
4242 /* Return adjustments that should have the same effect on function parameters
4243 and call arguments as if they were first changed according to adjustments in
4244 INNER and then by adjustments in OUTER. */
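/* For illustration (hypothetical example): if INNER turns (a, b, c) into
(a, c) by removing B, and OUTER then removes the first of the two
remaining parameters, the combined vector keeps only C and carries
IPA_PARM_OP_REMOVE entries for both A and B. */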
4245
4246 ipa_parm_adjustment_vec
4247 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4248 ipa_parm_adjustment_vec outer)
4249 {
4250 int i, outlen = outer.length ();
4251 int inlen = inner.length ();
4252 int removals = 0;
4253 ipa_parm_adjustment_vec adjustments, tmp;
4254
4255 tmp.create (inlen);
4256 for (i = 0; i < inlen; i++)
4257 {
4258 struct ipa_parm_adjustment *n;
4259 n = &inner[i];
4260
4261 if (n->op == IPA_PARM_OP_REMOVE)
4262 removals++;
4263 else
4264 {
4265 /* FIXME: Handling of new arguments is not implemented yet. */
4266 gcc_assert (n->op != IPA_PARM_OP_NEW);
4267 tmp.quick_push (*n);
4268 }
4269 }
4270
4271 adjustments.create (outlen + removals);
4272 for (i = 0; i < outlen; i++)
4273 {
4274 struct ipa_parm_adjustment r;
4275 struct ipa_parm_adjustment *out = &outer[i];
4276 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4277
4278 memset (&r, 0, sizeof (r));
4279 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4280 if (out->op == IPA_PARM_OP_REMOVE)
4281 {
4282 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4283 {
4284 r.op = IPA_PARM_OP_REMOVE;
4285 adjustments.quick_push (r);
4286 }
4287 continue;
4288 }
4289 else
4290 {
4291 /* FIXME: Handling of new arguments is not implemented yet. */
4292 gcc_assert (out->op != IPA_PARM_OP_NEW);
4293 }
4294
4295 r.base_index = in->base_index;
4296 r.type = out->type;
4297
4298 /* FIXME: Create nonlocal value too. */
4299
4300 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4301 r.op = IPA_PARM_OP_COPY;
4302 else if (in->op == IPA_PARM_OP_COPY)
4303 r.offset = out->offset;
4304 else if (out->op == IPA_PARM_OP_COPY)
4305 r.offset = in->offset;
4306 else
4307 r.offset = in->offset + out->offset;
4308 adjustments.quick_push (r);
4309 }
4310
4311 for (i = 0; i < inlen; i++)
4312 {
4313 struct ipa_parm_adjustment *n = &inner[i];
4314
4315 if (n->op == IPA_PARM_OP_REMOVE)
4316 adjustments.quick_push (*n);
4317 }
4318
4319 tmp.release ();
4320 return adjustments;
4321 }
4322
4323 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4324 human-friendly way, assuming they are meant to be applied to FNDECL. */
4325
4326 void
4327 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4328 tree fndecl)
4329 {
4330 int i, len = adjustments.length ();
4331 bool first = true;
4332 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4333
4334 fprintf (file, "IPA param adjustments: ");
4335 for (i = 0; i < len; i++)
4336 {
4337 struct ipa_parm_adjustment *adj;
4338 adj = &adjustments[i];
4339
4340 if (!first)
4341 fprintf (file, " ");
4342 else
4343 first = false;
4344
4345 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4346 print_generic_expr (file, parms[adj->base_index], 0);
4347 if (adj->base)
4348 {
4349 fprintf (file, ", base: ");
4350 print_generic_expr (file, adj->base, 0);
4351 }
4352 if (adj->new_decl)
4353 {
4354 fprintf (file, ", new_decl: ");
4355 print_generic_expr (file, adj->new_decl, 0);
4356 }
4357 if (adj->new_ssa_base)
4358 {
4359 fprintf (file, ", new_ssa_base: ");
4360 print_generic_expr (file, adj->new_ssa_base, 0);
4361 }
4362
4363 if (adj->op == IPA_PARM_OP_COPY)
4364 fprintf (file, ", copy_param");
4365 else if (adj->op == IPA_PARM_OP_REMOVE)
4366 fprintf (file, ", remove_param");
4367 else
4368 fprintf (file, ", offset %li", (long) adj->offset);
4369 if (adj->by_ref)
4370 fprintf (file, ", by_ref");
4371 print_node_brief (file, ", type: ", adj->type, 0);
4372 fprintf (file, "\n");
4373 }
4374 parms.release ();
4375 }
4376
4377 /* Dump the linked list of aggregate replacement values AV to F. */
4378
4379 void
4380 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4381 {
4382 bool comma = false;
4383 fprintf (f, " Aggregate replacements:");
4384 for (; av; av = av->next)
4385 {
4386 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4387 av->index, av->offset);
4388 print_generic_expr (f, av->value, 0);
4389 comma = true;
4390 }
4391 fprintf (f, "\n");
4392 }
4393
4394 /* Stream out jump function JUMP_FUNC to OB. */
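/* The encoding used below must be kept in sync with
ipa_read_jump_function. */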
4395
4396 static void
4397 ipa_write_jump_function (struct output_block *ob,
4398 struct ipa_jump_func *jump_func)
4399 {
4400 struct ipa_agg_jf_item *item;
4401 struct bitpack_d bp;
4402 int i, count;
4403
4404 streamer_write_uhwi (ob, jump_func->type);
4405 switch (jump_func->type)
4406 {
4407 case IPA_JF_UNKNOWN:
4408 break;
4409 case IPA_JF_CONST:
4410 gcc_assert (
4411 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4412 stream_write_tree (ob, jump_func->value.constant.value, true);
4413 break;
4414 case IPA_JF_PASS_THROUGH:
4415 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4416 if (jump_func->value.pass_through.operation == NOP_EXPR)
4417 {
4418 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4419 bp = bitpack_create (ob->main_stream);
4420 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4421 streamer_write_bitpack (&bp);
4422 }
4423 else
4424 {
4425 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4426 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4427 }
4428 break;
4429 case IPA_JF_ANCESTOR:
4430 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4431 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4432 bp = bitpack_create (ob->main_stream);
4433 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4434 streamer_write_bitpack (&bp);
4435 break;
4436 }
4437
4438 count = vec_safe_length (jump_func->agg.items);
4439 streamer_write_uhwi (ob, count);
4440 if (count)
4441 {
4442 bp = bitpack_create (ob->main_stream);
4443 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4444 streamer_write_bitpack (&bp);
4445 }
4446
4447 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4448 {
4449 streamer_write_uhwi (ob, item->offset);
4450 stream_write_tree (ob, item->value, true);
4451 }
4452 }
4453
4454 /* Read in jump function JUMP_FUNC from IB. */
4455
4456 static void
4457 ipa_read_jump_function (struct lto_input_block *ib,
4458 struct ipa_jump_func *jump_func,
4459 struct cgraph_edge *cs,
4460 struct data_in *data_in)
4461 {
4462 enum jump_func_type jftype;
4463 enum tree_code operation;
4464 int i, count;
4465
4466 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4467 switch (jftype)
4468 {
4469 case IPA_JF_UNKNOWN:
4470 jump_func->type = IPA_JF_UNKNOWN;
4471 break;
4472 case IPA_JF_CONST:
4473 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4474 break;
4475 case IPA_JF_PASS_THROUGH:
4476 operation = (enum tree_code) streamer_read_uhwi (ib);
4477 if (operation == NOP_EXPR)
4478 {
4479 int formal_id = streamer_read_uhwi (ib);
4480 struct bitpack_d bp = streamer_read_bitpack (ib);
4481 bool agg_preserved = bp_unpack_value (&bp, 1);
4482 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4483 }
4484 else
4485 {
4486 tree operand = stream_read_tree (ib, data_in);
4487 int formal_id = streamer_read_uhwi (ib);
4488 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4489 operation);
4490 }
4491 break;
4492 case IPA_JF_ANCESTOR:
4493 {
4494 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4495 int formal_id = streamer_read_uhwi (ib);
4496 struct bitpack_d bp = streamer_read_bitpack (ib);
4497 bool agg_preserved = bp_unpack_value (&bp, 1);
4498 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4499 break;
4500 }
4501 }
4502
4503 count = streamer_read_uhwi (ib);
4504 vec_alloc (jump_func->agg.items, count);
4505 if (count)
4506 {
4507 struct bitpack_d bp = streamer_read_bitpack (ib);
4508 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4509 }
4510 for (i = 0; i < count; i++)
4511 {
4512 struct ipa_agg_jf_item item;
4513 item.offset = streamer_read_uhwi (ib);
4514 item.value = stream_read_tree (ib, data_in);
4515 jump_func->agg.items->quick_push (item);
4516 }
4517 }
4518
4519 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4520 relevant to indirect inlining to OB. */
4521
4522 static void
4523 ipa_write_indirect_edge_info (struct output_block *ob,
4524 struct cgraph_edge *cs)
4525 {
4526 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4527 struct bitpack_d bp;
4528
4529 streamer_write_hwi (ob, ii->param_index);
4530 bp = bitpack_create (ob->main_stream);
4531 bp_pack_value (&bp, ii->polymorphic, 1);
4532 bp_pack_value (&bp, ii->agg_contents, 1);
4533 bp_pack_value (&bp, ii->member_ptr, 1);
4534 bp_pack_value (&bp, ii->by_ref, 1);
4535 bp_pack_value (&bp, ii->vptr_changed, 1);
4536 streamer_write_bitpack (&bp);
4537 if (ii->agg_contents || ii->polymorphic)
4538 streamer_write_hwi (ob, ii->offset);
4539 else
4540 gcc_assert (ii->offset == 0);
4541
4542 if (ii->polymorphic)
4543 {
4544 streamer_write_hwi (ob, ii->otr_token);
4545 stream_write_tree (ob, ii->otr_type, true);
4546 ii->context.stream_out (ob);
4547 }
4548 }
4549
4550 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4551 relevant to indirect inlining from IB. */
4552
4553 static void
4554 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4555 struct data_in *data_in,
4556 struct cgraph_edge *cs)
4557 {
4558 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4559 struct bitpack_d bp;
4560
4561 ii->param_index = (int) streamer_read_hwi (ib);
4562 bp = streamer_read_bitpack (ib);
4563 ii->polymorphic = bp_unpack_value (&bp, 1);
4564 ii->agg_contents = bp_unpack_value (&bp, 1);
4565 ii->member_ptr = bp_unpack_value (&bp, 1);
4566 ii->by_ref = bp_unpack_value (&bp, 1);
4567 ii->vptr_changed = bp_unpack_value (&bp, 1);
4568 if (ii->agg_contents || ii->polymorphic)
4569 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4570 else
4571 ii->offset = 0;
4572 if (ii->polymorphic)
4573 {
4574 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4575 ii->otr_type = stream_read_tree (ib, data_in);
4576 ii->context.stream_in (ib, data_in);
4577 }
4578 }
4579
4580 /* Stream out NODE info to OB. */
4581
4582 static void
4583 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4584 {
4585 int node_ref;
4586 lto_symtab_encoder_t encoder;
4587 struct ipa_node_params *info = IPA_NODE_REF (node);
4588 int j;
4589 struct cgraph_edge *e;
4590 struct bitpack_d bp;
4591
4592 encoder = ob->decl_state->symtab_node_encoder;
4593 node_ref = lto_symtab_encoder_encode (encoder, node);
4594 streamer_write_uhwi (ob, node_ref);
4595
4596 streamer_write_uhwi (ob, ipa_get_param_count (info));
4597 for (j = 0; j < ipa_get_param_count (info); j++)
4598 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4599 bp = bitpack_create (ob->main_stream);
4600 gcc_assert (info->analysis_done
4601 || ipa_get_param_count (info) == 0);
4602 gcc_assert (!info->node_enqueued);
4603 gcc_assert (!info->ipcp_orig_node);
4604 for (j = 0; j < ipa_get_param_count (info); j++)
4605 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4606 streamer_write_bitpack (&bp);
4607 for (j = 0; j < ipa_get_param_count (info); j++)
4608 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4609 for (e = node->callees; e; e = e->next_callee)
4610 {
4611 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4612
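/* The streamed value encodes the number of jump functions times two,
with the lowest bit saying whether polymorphic call contexts are
streamed alongside them (decoded in ipa_read_node_info). */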
4613 streamer_write_uhwi (ob,
4614 ipa_get_cs_argument_count (args) * 2
4615 + (args->polymorphic_call_contexts != NULL));
4616 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4617 {
4618 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4619 if (args->polymorphic_call_contexts != NULL)
4620 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4621 }
4622 }
4623 for (e = node->indirect_calls; e; e = e->next_callee)
4624 {
4625 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4626
4627 streamer_write_uhwi (ob,
4628 ipa_get_cs_argument_count (args) * 2
4629 + (args->polymorphic_call_contexts != NULL));
4630 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4631 {
4632 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4633 if (args->polymorphic_call_contexts != NULL)
4634 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4635 }
4636 ipa_write_indirect_edge_info (ob, e);
4637 }
4638 }
4639
4640 /* Stream in NODE info from IB. */
4641
4642 static void
4643 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4644 struct data_in *data_in)
4645 {
4646 struct ipa_node_params *info = IPA_NODE_REF (node);
4647 int k;
4648 struct cgraph_edge *e;
4649 struct bitpack_d bp;
4650
4651 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4652
4653 for (k = 0; k < ipa_get_param_count (info); k++)
4654 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4655
4656 bp = streamer_read_bitpack (ib);
4657 if (ipa_get_param_count (info) != 0)
4658 info->analysis_done = true;
4659 info->node_enqueued = false;
4660 for (k = 0; k < ipa_get_param_count (info); k++)
4661 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4662 for (k = 0; k < ipa_get_param_count (info); k++)
4663 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4664 for (e = node->callees; e; e = e->next_callee)
4665 {
4666 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4667 int count = streamer_read_uhwi (ib);
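/* The lowest bit of the streamed value says whether polymorphic call
contexts follow; see ipa_write_node_info. */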
4668 bool contexts_computed = count & 1;
4669 count /= 2;
4670
4671 if (!count)
4672 continue;
4673 vec_safe_grow_cleared (args->jump_functions, count);
4674 if (contexts_computed)
4675 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4676
4677 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4678 {
4679 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4680 data_in);
4681 if (contexts_computed)
4682 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4683 }
4684 }
4685 for (e = node->indirect_calls; e; e = e->next_callee)
4686 {
4687 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4688 int count = streamer_read_uhwi (ib);
4689 bool contexts_computed = count & 1;
4690 count /= 2;
4691
4692 if (count)
4693 {
4694 vec_safe_grow_cleared (args->jump_functions, count);
4695 if (contexts_computed)
4696 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4697 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4698 {
4699 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4700 data_in);
4701 if (contexts_computed)
4702 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4703 }
4704 }
4705 ipa_read_indirect_edge_info (ib, data_in, e);
4706 }
4707 }
4708
4709 /* Write jump functions for all nodes in the current LTO partition. */
4710
4711 void
4712 ipa_prop_write_jump_functions (void)
4713 {
4714 struct cgraph_node *node;
4715 struct output_block *ob;
4716 unsigned int count = 0;
4717 lto_symtab_encoder_iterator lsei;
4718 lto_symtab_encoder_t encoder;
4719
4720
4721 if (!ipa_node_params_vector.exists ())
4722 return;
4723
4724 ob = create_output_block (LTO_section_jump_functions);
4725 encoder = ob->decl_state->symtab_node_encoder;
4726 ob->symbol = NULL;
4727 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4728 lsei_next_function_in_partition (&lsei))
4729 {
4730 node = lsei_cgraph_node (lsei);
4731 if (node->has_gimple_body_p ()
4732 && IPA_NODE_REF (node) != NULL)
4733 count++;
4734 }
4735
4736 streamer_write_uhwi (ob, count);
4737
4738 /* Process all of the functions. */
4739 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4740 lsei_next_function_in_partition (&lsei))
4741 {
4742 node = lsei_cgraph_node (lsei);
4743 if (node->has_gimple_body_p ()
4744 && IPA_NODE_REF (node) != NULL)
4745 ipa_write_node_info (ob, node);
4746 }
4747 streamer_write_char_stream (ob->main_stream, 0);
4748 produce_asm (ob, NULL);
4749 destroy_output_block (ob);
4750 }
4751
4752 /* Read a jump-functions section from FILE_DATA of length LEN with data DATA. */
4753
4754 static void
4755 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4756 size_t len)
4757 {
4758 const struct lto_function_header *header =
4759 (const struct lto_function_header *) data;
4760 const int cfg_offset = sizeof (struct lto_function_header);
4761 const int main_offset = cfg_offset + header->cfg_size;
4762 const int string_offset = main_offset + header->main_size;
4763 struct data_in *data_in;
4764 unsigned int i;
4765 unsigned int count;
4766
4767 lto_input_block ib_main ((const char *) data + main_offset,
4768 header->main_size);
4769
4770 data_in =
4771 lto_data_in_create (file_data, (const char *) data + string_offset,
4772 header->string_size, vNULL);
4773 count = streamer_read_uhwi (&ib_main);
4774
4775 for (i = 0; i < count; i++)
4776 {
4777 unsigned int index;
4778 struct cgraph_node *node;
4779 lto_symtab_encoder_t encoder;
4780
4781 index = streamer_read_uhwi (&ib_main);
4782 encoder = file_data->symtab_node_encoder;
4783 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4784 index));
4785 gcc_assert (node->definition);
4786 ipa_read_node_info (&ib_main, node, data_in);
4787 }
4788 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4789 len);
4790 lto_data_in_delete (data_in);
4791 }
4792
4793 /* Read ipcp jump functions. */
4794
4795 void
4796 ipa_prop_read_jump_functions (void)
4797 {
4798 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4799 struct lto_file_decl_data *file_data;
4800 unsigned int j = 0;
4801
4802 ipa_check_create_node_params ();
4803 ipa_check_create_edge_args ();
4804 ipa_register_cgraph_hooks ();
4805
4806 while ((file_data = file_data_vec[j++]))
4807 {
4808 size_t len;
4809 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4810
4811 if (data)
4812 ipa_prop_read_section (file_data, data, len);
4813 }
4814 }
4815
4816 /* After merging units, we can get a mismatch in argument counts.
4817 Also, decl merging might have rendered parameter lists obsolete.
4818 Make sure node params and edge args are (re)created as needed. */
4819
4820 void
4821 ipa_update_after_lto_read (void)
4822 {
4823 ipa_check_create_node_params ();
4824 ipa_check_create_edge_args ();
4825 }
4826
4827 void
4828 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4829 {
4830 int node_ref;
4831 unsigned int count = 0;
4832 lto_symtab_encoder_t encoder;
4833 struct ipa_agg_replacement_value *aggvals, *av;
4834
4835 aggvals = ipa_get_agg_replacements_for_node (node);
4836 encoder = ob->decl_state->symtab_node_encoder;
4837 node_ref = lto_symtab_encoder_encode (encoder, node);
4838 streamer_write_uhwi (ob, node_ref);
4839
4840 for (av = aggvals; av; av = av->next)
4841 count++;
4842 streamer_write_uhwi (ob, count);
4843
4844 for (av = aggvals; av; av = av->next)
4845 {
4846 struct bitpack_d bp;
4847
4848 streamer_write_uhwi (ob, av->offset);
4849 streamer_write_uhwi (ob, av->index);
4850 stream_write_tree (ob, av->value, true);
4851
4852 bp = bitpack_create (ob->main_stream);
4853 bp_pack_value (&bp, av->by_ref, 1);
4854 streamer_write_bitpack (&bp);
4855 }
4856 }
4857
4858 /* Stream in the aggregate value replacement chain for NODE from IB. */
4859
4860 static void
4861 read_agg_replacement_chain (struct lto_input_block *ib,
4862 struct cgraph_node *node,
4863 struct data_in *data_in)
4864 {
4865 struct ipa_agg_replacement_value *aggvals = NULL;
4866 unsigned int count, i;
4867
4868 count = streamer_read_uhwi (ib);
4869 for (i = 0; i < count; i++)
4870 {
4871 struct ipa_agg_replacement_value *av;
4872 struct bitpack_d bp;
4873
4874 av = ggc_alloc<ipa_agg_replacement_value> ();
4875 av->offset = streamer_read_uhwi (ib);
4876 av->index = streamer_read_uhwi (ib);
4877 av->value = stream_read_tree (ib, data_in);
4878 bp = streamer_read_bitpack (ib);
4879 av->by_ref = bp_unpack_value (&bp, 1);
4880 av->next = aggvals;
4881 aggvals = av;
4882 }
4883 ipa_set_node_agg_value_chain (node, aggvals);
4884 }
4885
4886 /* Write all aggregate replacements for nodes in the current LTO partition. */
4887
4888 void
4889 ipa_prop_write_all_agg_replacement (void)
4890 {
4891 struct cgraph_node *node;
4892 struct output_block *ob;
4893 unsigned int count = 0;
4894 lto_symtab_encoder_iterator lsei;
4895 lto_symtab_encoder_t encoder;
4896
4897 if (!ipa_node_agg_replacements)
4898 return;
4899
4900 ob = create_output_block (LTO_section_ipcp_transform);
4901 encoder = ob->decl_state->symtab_node_encoder;
4902 ob->symbol = NULL;
4903 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4904 lsei_next_function_in_partition (&lsei))
4905 {
4906 node = lsei_cgraph_node (lsei);
4907 if (node->has_gimple_body_p ()
4908 && ipa_get_agg_replacements_for_node (node) != NULL)
4909 count++;
4910 }
4911
4912 streamer_write_uhwi (ob, count);
4913
4914 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4915 lsei_next_function_in_partition (&lsei))
4916 {
4917 node = lsei_cgraph_node (lsei);
4918 if (node->has_gimple_body_p ()
4919 && ipa_get_agg_replacements_for_node (node) != NULL)
4920 write_agg_replacement_chain (ob, node);
4921 }
4922 streamer_write_char_stream (ob->main_stream, 0);
4923 produce_asm (ob, NULL);
4924 destroy_output_block (ob);
4925 }
4926
4927 /* Read replacements section in file FILE_DATA of length LEN with data
4928 DATA. */
4929
4930 static void
4931 read_replacements_section (struct lto_file_decl_data *file_data,
4932 const char *data,
4933 size_t len)
4934 {
4935 const struct lto_function_header *header =
4936 (const struct lto_function_header *) data;
4937 const int cfg_offset = sizeof (struct lto_function_header);
4938 const int main_offset = cfg_offset + header->cfg_size;
4939 const int string_offset = main_offset + header->main_size;
4940 struct data_in *data_in;
4941 unsigned int i;
4942 unsigned int count;
4943
4944 lto_input_block ib_main ((const char *) data + main_offset,
4945 header->main_size);
4946
4947 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4948 header->string_size, vNULL);
4949 count = streamer_read_uhwi (&ib_main);
4950
4951 for (i = 0; i < count; i++)
4952 {
4953 unsigned int index;
4954 struct cgraph_node *node;
4955 lto_symtab_encoder_t encoder;
4956
4957 index = streamer_read_uhwi (&ib_main);
4958 encoder = file_data->symtab_node_encoder;
4959 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4960 index));
4961 gcc_assert (node->definition);
4962 read_agg_replacement_chain (&ib_main, node, data_in);
4963 }
4964 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4965 len);
4966 lto_data_in_delete (data_in);
4967 }
4968
4969 /* Read IPA-CP aggregate replacements. */
4970
4971 void
4972 ipa_prop_read_all_agg_replacement (void)
4973 {
4974 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4975 struct lto_file_decl_data *file_data;
4976 unsigned int j = 0;
4977
4978 while ((file_data = file_data_vec[j++]))
4979 {
4980 size_t len;
4981 const char *data = lto_get_section_data (file_data,
4982 LTO_section_ipcp_transform,
4983 NULL, &len);
4984 if (data)
4985 read_replacements_section (file_data, data, len);
4986 }
4987 }
4988
4989 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4990 NODE. */
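/* For illustration (hypothetical example): if the clone skips the first
of three parameters, replacements for indices 1 and 2 are remapped to
0 and 1, while a replacement for the skipped parameter gets index -1
and is effectively ignored afterwards. */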
4991
4992 static void
4993 adjust_agg_replacement_values (struct cgraph_node *node,
4994 struct ipa_agg_replacement_value *aggval)
4995 {
4996 struct ipa_agg_replacement_value *v;
4997 int i, c = 0, d = 0, *adj;
4998
4999 if (!node->clone.combined_args_to_skip)
5000 return;
5001
5002 for (v = aggval; v; v = v->next)
5003 {
5004 gcc_assert (v->index >= 0);
5005 if (c < v->index)
5006 c = v->index;
5007 }
5008 c++;
5009
5010 adj = XALLOCAVEC (int, c);
5011 for (i = 0; i < c; i++)
5012 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5013 {
5014 adj[i] = -1;
5015 d++;
5016 }
5017 else
5018 adj[i] = i - d;
5019
5020 for (v = aggval; v; v = v->next)
5021 v->index = adj[v->index];
5022 }
5023
5024 /* Dominator walker driving the ipcp modification phase. */
5025
5026 class ipcp_modif_dom_walker : public dom_walker
5027 {
5028 public:
5029 ipcp_modif_dom_walker (struct func_body_info *fbi,
5030 vec<ipa_param_descriptor> descs,
5031 struct ipa_agg_replacement_value *av,
5032 bool *sc, bool *cc)
5033 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5034 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5035
5036 virtual void before_dom_children (basic_block);
5037
5038 private:
5039 struct func_body_info *m_fbi;
5040 vec<ipa_param_descriptor> m_descriptors;
5041 struct ipa_agg_replacement_value *m_aggval;
5042 bool *m_something_changed, *m_cfg_changed;
5043 };
5044
5045 void
5046 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5047 {
5048 gimple_stmt_iterator gsi;
5049 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5050 {
5051 struct ipa_agg_replacement_value *v;
5052 gimple stmt = gsi_stmt (gsi);
5053 tree rhs, val, t;
5054 HOST_WIDE_INT offset, size;
5055 int index;
5056 bool by_ref, vce;
5057
5058 if (!gimple_assign_load_p (stmt))
5059 continue;
5060 rhs = gimple_assign_rhs1 (stmt);
5061 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5062 continue;
5063
5064 vce = false;
5065 t = rhs;
5066 while (handled_component_p (t))
5067 {
5068 /* A V_C_E can do things like convert an array of integers to one
5069 bigger integer and similar things we do not handle below. */
5070 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5071 {
5072 vce = true;
5073 break;
5074 }
5075 t = TREE_OPERAND (t, 0);
5076 }
5077 if (vce)
5078 continue;
5079
5080 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5081 &offset, &size, &by_ref))
5082 continue;
5083 for (v = m_aggval; v; v = v->next)
5084 if (v->index == index
5085 && v->offset == offset)
5086 break;
5087 if (!v
5088 || v->by_ref != by_ref
5089 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5090 continue;
5091
5092 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5093 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5094 {
5095 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5096 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5097 else if (TYPE_SIZE (TREE_TYPE (rhs))
5098 == TYPE_SIZE (TREE_TYPE (v->value)))
5099 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5100 else
5101 {
5102 if (dump_file)
5103 {
5104 fprintf (dump_file, " const ");
5105 print_generic_expr (dump_file, v->value, 0);
5106 fprintf (dump_file, " can't be converted to type of ");
5107 print_generic_expr (dump_file, rhs, 0);
5108 fprintf (dump_file, "\n");
5109 }
5110 continue;
5111 }
5112 }
5113 else
5114 val = v->value;
5115
5116 if (dump_file && (dump_flags & TDF_DETAILS))
5117 {
5118 fprintf (dump_file, "Modifying stmt:\n ");
5119 print_gimple_stmt (dump_file, stmt, 0, 0);
5120 }
5121 gimple_assign_set_rhs_from_tree (&gsi, val);
5122 update_stmt (stmt);
5123
5124 if (dump_file && (dump_flags & TDF_DETAILS))
5125 {
5126 fprintf (dump_file, "into:\n ");
5127 print_gimple_stmt (dump_file, stmt, 0, 0);
5128 fprintf (dump_file, "\n");
5129 }
5130
5131 *m_something_changed = true;
5132 if (maybe_clean_eh_stmt (stmt)
5133 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5134 *m_cfg_changed = true;
5135 }
5136
5137 }
5138
5139 /* IPCP transformation phase: apply aggregate values propagated by IPA-CP. */
5140
5141 unsigned int
5142 ipcp_transform_function (struct cgraph_node *node)
5143 {
5144 vec<ipa_param_descriptor> descriptors = vNULL;
5145 struct func_body_info fbi;
5146 struct ipa_agg_replacement_value *aggval;
5147 int param_count;
5148 bool cfg_changed = false, something_changed = false;
5149
5150 gcc_checking_assert (cfun);
5151 gcc_checking_assert (current_function_decl);
5152
5153 if (dump_file)
5154 fprintf (dump_file, "Modification phase of node %s/%i\n",
5155 node->name (), node->order);
5156
5157 aggval = ipa_get_agg_replacements_for_node (node);
5158 if (!aggval)
5159 return 0;
5160 param_count = count_formal_params (node->decl);
5161 if (param_count == 0)
5162 return 0;
5163 adjust_agg_replacement_values (node, aggval);
5164 if (dump_file)
5165 ipa_dump_agg_replacement_values (dump_file, aggval);
5166
5167 fbi.node = node;
5168 fbi.info = NULL;
5169 fbi.bb_infos = vNULL;
5170 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5171 fbi.param_count = param_count;
5172 fbi.aa_walked = 0;
5173
5174 descriptors.safe_grow_cleared (param_count);
5175 ipa_populate_param_decls (node, descriptors);
5176 calculate_dominance_info (CDI_DOMINATORS);
5177 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5178 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5179
5180 int i;
5181 struct ipa_bb_info *bi;
5182 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5183 free_ipa_bb_info (bi);
5184 fbi.bb_infos.release ();
5185 free_dominance_info (CDI_DOMINATORS);
5186 (*ipa_node_agg_replacements)[node->uid] = NULL;
5187 descriptors.release ();
5188
5189 if (!something_changed)
5190 return 0;
5191 else if (cfg_changed)
5192 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5193 else
5194 return TODO_update_ssa_only_virtuals;
5195 }