1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41
42
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
45
46 struct param_analysis_info
47 {
48 bool modified;
49 bitmap visited_statements;
50 };
51
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
 54 /* Vector where the edge argument infos are actually stored. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
56
57 /* Bitmap with all UIDs of call graph edges that have been already processed
58 by indirect inlining. */
59 static bitmap iinlining_processed_edges;
60
61 /* Holders of ipa cgraph hooks: */
62 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
63 static struct cgraph_node_hook_list *node_removal_hook_holder;
64 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
65 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
66
67 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
 68 it is in one or not.  It should almost never be used directly; use
 69 ipa_push_func_to_list instead. */
70
71 void
72 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
73 struct cgraph_node *node,
74 struct ipa_node_params *info)
75 {
76 struct ipa_func_list *temp;
77
78 info->node_enqueued = 1;
79 temp = XCNEW (struct ipa_func_list);
80 temp->node = node;
81 temp->next = *wl;
82 *wl = temp;
83 }
84
85 /* Initialize worklist to contain all functions. */
86
87 struct ipa_func_list *
88 ipa_init_func_list (void)
89 {
90 struct cgraph_node *node;
91 struct ipa_func_list * wl;
92
93 wl = NULL;
94 for (node = cgraph_nodes; node; node = node->next)
95 if (node->analyzed)
96 {
97 struct ipa_node_params *info = IPA_NODE_REF (node);
98 /* Unreachable nodes should have been eliminated before ipcp and
99 inlining. */
100 gcc_assert (node->needed || node->reachable);
101 ipa_push_func_to_list_1 (&wl, node, info);
102 }
103
104 return wl;
105 }
106
107 /* Remove a function from the worklist WL and return it. */
108
109 struct cgraph_node *
110 ipa_pop_func_from_list (struct ipa_func_list **wl)
111 {
112 struct ipa_node_params *info;
113 struct ipa_func_list *first;
114 struct cgraph_node *node;
115
116 first = *wl;
117 *wl = (*wl)->next;
118 node = first->node;
119 free (first);
120
121 info = IPA_NODE_REF (node);
122 info->node_enqueued = 0;
123 return node;
124 }
125
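/* A minimal usage sketch of the worklist API above, as a propagation pass
   such as IPA-CP might drive it.  The per-node action process_node is a
   hypothetical placeholder, not a function defined in this file:

     struct ipa_func_list *wl = ipa_init_func_list ();
     while (wl)
       {
         struct cgraph_node *node = ipa_pop_func_from_list (&wl);
         process_node (node);
       }

   Nodes whose information changes can be re-queued with
   ipa_push_func_to_list so that they are eventually processed again.  */
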
 126 /* Return the index of the formal parameter whose tree is PTREE in the function
 127 corresponding to INFO, or -1 if there is no such parameter. */
128
129 static int
130 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
131 {
132 int i, count;
133
134 count = ipa_get_param_count (info);
135 for (i = 0; i < count; i++)
 136 if (ipa_get_param (info, i) == ptree)
137 return i;
138
139 return -1;
140 }
141
 142 /* Populate the decl fields in the parameter descriptors of INFO, which
 143 corresponds to NODE. */
144
145 static void
146 ipa_populate_param_decls (struct cgraph_node *node,
147 struct ipa_node_params *info)
148 {
149 tree fndecl;
150 tree fnargs;
151 tree parm;
152 int param_num;
153
154 fndecl = node->decl;
155 fnargs = DECL_ARGUMENTS (fndecl);
156 param_num = 0;
157 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
158 {
159 info->params[param_num].decl = parm;
160 param_num++;
161 }
162 }
163
164 /* Return how many formal parameters FNDECL has. */
165
166 static inline int
167 count_formal_params_1 (tree fndecl)
168 {
169 tree parm;
170 int count = 0;
171
172 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
173 count++;
174
175 return count;
176 }
177
 178 /* Count number of formal parameters in NODE. Store the result to the
179 appropriate field of INFO. */
180
181 static void
182 ipa_count_formal_params (struct cgraph_node *node,
183 struct ipa_node_params *info)
184 {
185 int param_num;
186
187 param_num = count_formal_params_1 (node->decl);
188 ipa_set_param_count (info, param_num);
189 }
190
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
193 param_decls. */
194
195 void
196 ipa_initialize_node_params (struct cgraph_node *node)
197 {
198 struct ipa_node_params *info = IPA_NODE_REF (node);
199
200 if (!info->params)
201 {
202 ipa_count_formal_params (node, info);
203 info->params = XCNEWVEC (struct ipa_param_descriptor,
204 ipa_get_param_count (info));
205 ipa_populate_param_decls (node, info);
206 }
207 }
208
 209 /* Count the number of arguments of callsite CS and store it in the
 210 ipa_edge_args structure corresponding to this callsite. */
211
212 static void
213 ipa_count_arguments (struct cgraph_edge *cs)
214 {
215 gimple stmt;
216 int arg_num;
217
218 stmt = cs->call_stmt;
219 gcc_assert (is_gimple_call (stmt));
220 arg_num = gimple_call_num_args (stmt);
221 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
222 <= (unsigned) cgraph_edge_max_uid)
223 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
224 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
225 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
226 }
227
228 /* Print the jump functions associated with call graph edge CS to file F. */
229
230 static void
231 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
232 {
233 int i, count;
234
235 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
236 for (i = 0; i < count; i++)
237 {
238 struct ipa_jump_func *jump_func;
239 enum jump_func_type type;
240
241 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
242 type = jump_func->type;
243
244 fprintf (f, " param %d: ", i);
245 if (type == IPA_JF_UNKNOWN)
246 fprintf (f, "UNKNOWN\n");
247 else if (type == IPA_JF_KNOWN_TYPE)
248 {
249 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
250 fprintf (f, "KNOWN TYPE, type in binfo is: ");
251 print_generic_expr (f, binfo_type, 0);
252 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
253 }
254 else if (type == IPA_JF_CONST)
255 {
256 tree val = jump_func->value.constant;
257 fprintf (f, "CONST: ");
258 print_generic_expr (f, val, 0);
259 if (TREE_CODE (val) == ADDR_EXPR
260 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
261 {
262 fprintf (f, " -> ");
263 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
264 0);
265 }
266 fprintf (f, "\n");
267 }
268 else if (type == IPA_JF_CONST_MEMBER_PTR)
269 {
270 fprintf (f, "CONST MEMBER PTR: ");
271 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
272 fprintf (f, ", ");
273 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
274 fprintf (f, "\n");
275 }
276 else if (type == IPA_JF_PASS_THROUGH)
277 {
278 fprintf (f, "PASS THROUGH: ");
279 fprintf (f, "%d, op %s ",
280 jump_func->value.pass_through.formal_id,
281 tree_code_name[(int)
282 jump_func->value.pass_through.operation]);
283 if (jump_func->value.pass_through.operation != NOP_EXPR)
 284 print_generic_expr (f,
 285 jump_func->value.pass_through.operand, 0);
 286 fprintf (f, "\n");
287 }
288 else if (type == IPA_JF_ANCESTOR)
289 {
290 fprintf (f, "ANCESTOR: ");
291 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
292 jump_func->value.ancestor.formal_id,
293 jump_func->value.ancestor.offset);
294 print_generic_expr (f, jump_func->value.ancestor.type, 0);
 295 fprintf (f, "\n");
296 }
297 }
298 }
299
300
301 /* Print the jump functions of all arguments on all call graph edges going from
302 NODE to file F. */
303
304 void
305 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
306 {
307 struct cgraph_edge *cs;
308 int i;
309
310 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
311 for (cs = node->callees; cs; cs = cs->next_callee)
312 {
313 if (!ipa_edge_args_info_available_for_edge_p (cs))
314 continue;
315
316 fprintf (f, " callsite %s/%i -> %s/%i : \n",
317 cgraph_node_name (node), node->uid,
318 cgraph_node_name (cs->callee), cs->callee->uid);
319 ipa_print_node_jump_functions_for_edge (f, cs);
320 }
321
322 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
323 {
324 if (!ipa_edge_args_info_available_for_edge_p (cs))
325 continue;
326
327 if (cs->call_stmt)
328 {
329 fprintf (f, " indirect callsite %d for stmt ", i);
330 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
331 }
332 else
333 fprintf (f, " indirect callsite %d :\n", i);
334 ipa_print_node_jump_functions_for_edge (f, cs);
335
336 }
337 }
338
339 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
340
341 void
342 ipa_print_all_jump_functions (FILE *f)
343 {
344 struct cgraph_node *node;
345
346 fprintf (f, "\nJump functions:\n");
347 for (node = cgraph_nodes; node; node = node->next)
348 {
349 ipa_print_node_jump_functions (f, node);
350 }
351 }
352
353 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
354 of an assignment statement STMT, try to find out whether NAME can be
355 described by a (possibly polynomial) pass-through jump-function or an
356 ancestor jump function and if so, write the appropriate function into
 357 JFUNC. */
358
359 static void
360 compute_complex_assign_jump_func (struct ipa_node_params *info,
361 struct ipa_jump_func *jfunc,
362 gimple stmt, tree name)
363 {
364 HOST_WIDE_INT offset, size, max_size;
365 tree op1, op2, type;
366 int index;
367
368 op1 = gimple_assign_rhs1 (stmt);
369 op2 = gimple_assign_rhs2 (stmt);
370
371 if (TREE_CODE (op1) == SSA_NAME
372 && SSA_NAME_IS_DEFAULT_DEF (op1))
373 {
374 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
375 if (index < 0)
376 return;
377
378 if (op2)
379 {
380 if (!is_gimple_ip_invariant (op2)
381 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
382 && !useless_type_conversion_p (TREE_TYPE (name),
383 TREE_TYPE (op1))))
384 return;
385
386 jfunc->type = IPA_JF_PASS_THROUGH;
387 jfunc->value.pass_through.formal_id = index;
388 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
389 jfunc->value.pass_through.operand = op2;
390 }
391 else if (gimple_assign_unary_nop_p (stmt))
392 {
393 jfunc->type = IPA_JF_PASS_THROUGH;
394 jfunc->value.pass_through.formal_id = index;
395 jfunc->value.pass_through.operation = NOP_EXPR;
396 }
397 return;
398 }
399
400 if (TREE_CODE (op1) != ADDR_EXPR)
401 return;
402
403 op1 = TREE_OPERAND (op1, 0);
404 type = TREE_TYPE (op1);
405 if (TREE_CODE (type) != RECORD_TYPE)
406 return;
407 op1 = get_ref_base_and_extent (op1, &offset, &size, &max_size);
408 if (TREE_CODE (op1) != MEM_REF
409 /* If this is a varying address, punt. */
410 || max_size == -1
411 || max_size != size)
412 return;
413 offset += mem_ref_offset (op1).low * BITS_PER_UNIT;
414 op1 = TREE_OPERAND (op1, 0);
415 if (TREE_CODE (op1) != SSA_NAME
416 || !SSA_NAME_IS_DEFAULT_DEF (op1))
417 return;
418
419 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
420 if (index >= 0)
421 {
422 jfunc->type = IPA_JF_ANCESTOR;
423 jfunc->value.ancestor.formal_id = index;
424 jfunc->value.ancestor.offset = offset;
425 jfunc->value.ancestor.type = type;
426 }
427 }
428
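/* Two hypothetical assignments and the jump functions the routine above
   derives for an SSA name defined by them, assuming i_1(D) and obj_2(D) are
   the default definitions of formal parameters number 0 and 1 of the caller:

     a_3 = i_1(D) + 4;          PASS THROUGH: formal_id 0, operation
                                PLUS_EXPR, operand 4
     p_5 = &obj_2(D)->D.1762;   ANCESTOR: formal_id 1, offset of the D.1762
                                field in bits, type of that sub-object

   Statements that match neither shape leave JFUNC untouched.  */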
429
430 /* Given that an actual argument is an SSA_NAME that is a result of a phi
431 statement PHI, try to find out whether NAME is in fact a
432 multiple-inheritance typecast from a descendant into an ancestor of a formal
433 parameter and thus can be described by an ancestor jump function and if so,
434 write the appropriate function into JFUNC.
435
436 Essentially we want to match the following pattern:
437
438 if (obj_2(D) != 0B)
439 goto <bb 3>;
440 else
441 goto <bb 4>;
442
443 <bb 3>:
444 iftmp.1_3 = &obj_2(D)->D.1762;
445
446 <bb 4>:
447 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
448 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
449 return D.1879_6; */
450
451 static void
452 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
453 struct ipa_jump_func *jfunc,
454 gimple phi)
455 {
456 HOST_WIDE_INT offset, size, max_size;
457 gimple assign, cond;
458 basic_block phi_bb, assign_bb, cond_bb;
459 tree tmp, parm, expr;
460 int index, i;
461
462 if (gimple_phi_num_args (phi) != 2
463 || !integer_zerop (PHI_ARG_DEF (phi, 1)))
464 return;
465
466 tmp = PHI_ARG_DEF (phi, 0);
467 if (TREE_CODE (tmp) != SSA_NAME
468 || SSA_NAME_IS_DEFAULT_DEF (tmp)
469 || !POINTER_TYPE_P (TREE_TYPE (tmp))
470 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
471 return;
472
473 assign = SSA_NAME_DEF_STMT (tmp);
474 assign_bb = gimple_bb (assign);
475 if (!single_pred_p (assign_bb)
476 || !gimple_assign_single_p (assign))
477 return;
478 expr = gimple_assign_rhs1 (assign);
479
480 if (TREE_CODE (expr) != ADDR_EXPR)
481 return;
482 expr = TREE_OPERAND (expr, 0);
483 expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);
484
485 if (TREE_CODE (expr) != MEM_REF
486 /* If this is a varying address, punt. */
487 || max_size == -1
488 || max_size != size)
489 return;
490 offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
491 parm = TREE_OPERAND (expr, 0);
492 if (TREE_CODE (parm) != SSA_NAME
493 || !SSA_NAME_IS_DEFAULT_DEF (parm))
494 return;
495
496 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
497 if (index < 0)
498 return;
499
500 cond_bb = single_pred (assign_bb);
501 cond = last_stmt (cond_bb);
502 if (!cond
503 || gimple_code (cond) != GIMPLE_COND
504 || gimple_cond_code (cond) != NE_EXPR
505 || gimple_cond_lhs (cond) != parm
506 || !integer_zerop (gimple_cond_rhs (cond)))
507 return;
508
509
510 phi_bb = gimple_bb (phi);
511 for (i = 0; i < 2; i++)
512 {
513 basic_block pred = EDGE_PRED (phi_bb, i)->src;
514 if (pred != assign_bb && pred != cond_bb)
515 return;
516 }
517
518 jfunc->type = IPA_JF_ANCESTOR;
519 jfunc->value.ancestor.formal_id = index;
520 jfunc->value.ancestor.offset = offset;
521 jfunc->value.ancestor.type = TREE_TYPE (TREE_TYPE (tmp));
522 }
523
 524 /* Given OP which is passed as an actual argument to a called function,
525 determine if it is possible to construct a KNOWN_TYPE jump function for it
526 and if so, create one and store it to JFUNC. */
527
528 static void
529 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc)
530 {
531 tree binfo;
532
533 if (TREE_CODE (op) != ADDR_EXPR)
534 return;
535
536 op = TREE_OPERAND (op, 0);
537 binfo = gimple_get_relevant_ref_binfo (op, NULL_TREE);
538 if (binfo)
539 {
540 jfunc->type = IPA_JF_KNOWN_TYPE;
541 jfunc->value.base_binfo = binfo;
542 }
543 }
544
545
546 /* Determine the jump functions of scalar arguments. Scalar means SSA names
547 and constants of a number of selected types. INFO is the ipa_node_params
548 structure associated with the caller, FUNCTIONS is a pointer to an array of
549 jump function structures associated with CALL which is the call statement
 550 being examined. */
551
552 static void
553 compute_scalar_jump_functions (struct ipa_node_params *info,
554 struct ipa_jump_func *functions,
555 gimple call)
556 {
557 tree arg;
558 unsigned num = 0;
559
560 for (num = 0; num < gimple_call_num_args (call); num++)
561 {
562 arg = gimple_call_arg (call, num);
563
564 if (is_gimple_ip_invariant (arg))
565 {
566 functions[num].type = IPA_JF_CONST;
567 functions[num].value.constant = arg;
568 }
569 else if (TREE_CODE (arg) == SSA_NAME)
570 {
571 if (SSA_NAME_IS_DEFAULT_DEF (arg))
572 {
573 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
574
575 if (index >= 0)
576 {
577 functions[num].type = IPA_JF_PASS_THROUGH;
578 functions[num].value.pass_through.formal_id = index;
579 functions[num].value.pass_through.operation = NOP_EXPR;
580 }
581 }
582 else
583 {
584 gimple stmt = SSA_NAME_DEF_STMT (arg);
585 if (is_gimple_assign (stmt))
586 compute_complex_assign_jump_func (info, &functions[num],
587 stmt, arg);
588 else if (gimple_code (stmt) == GIMPLE_PHI)
589 compute_complex_ancestor_jump_func (info, &functions[num],
590 stmt);
591 }
592 }
593 else
594 compute_known_type_jump_func (arg, &functions[num]);
595 }
596 }
597
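/* As an illustration of the dispatch above, consider a hypothetical call

     bar (7, i_1(D), a_3, &s_obj);

   where i_1(D) is the default definition of a formal parameter, a_3 is
   defined by an assignment statement and s_obj is a local object of a class
   type.  The loop records a CONST jump function for the first argument, a
   simple NOP_EXPR PASS THROUGH for the second, lets
   compute_complex_assign_jump_func examine the statement defining the third
   (or compute_complex_ancestor_jump_func if a_3 were defined by a PHI), and
   hands the fourth to compute_known_type_jump_func.  */
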
598 /* Inspect the given TYPE and return true iff it has the same structure (the
599 same number of fields of the same types) as a C++ member pointer. If
600 METHOD_PTR and DELTA are non-NULL, store the trees representing the
601 corresponding fields there. */
602
603 static bool
604 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
605 {
606 tree fld;
607
608 if (TREE_CODE (type) != RECORD_TYPE)
609 return false;
610
611 fld = TYPE_FIELDS (type);
612 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
613 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
614 return false;
615
616 if (method_ptr)
617 *method_ptr = fld;
618
619 fld = DECL_CHAIN (fld);
 620 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld)))
621 return false;
622 if (delta)
623 *delta = fld;
624
625 if (DECL_CHAIN (fld))
626 return false;
627
628 return true;
629 }
630
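/* The shape the predicate above accepts corresponds to the usual C++ ABI
   representation of a pointer to member function: a RECORD_TYPE whose first
   field (called __pfn in the gimple dumps used as examples elsewhere in this
   file) has a pointer type with a METHOD_TYPE as its pointee, followed by a
   second, integral field (__delta) holding the adjustment to apply to the
   this pointer.  Whether the "is virtual" flag lives in the low bit of __pfn
   or of __delta is target dependent; see the use of
   TARGET_PTRMEMFUNC_VBIT_LOCATION in ipa_analyze_indirect_call_uses below.
   The exact representation is ABI dependent, so this is only an
   illustration.  */
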
 631 /* Callback of walk_aliased_vdefs.  Flag that the callback has been invoked
 632 by setting the boolean variable pointed to by DATA to true. */
633
634 static bool
635 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
636 void *data)
637 {
638 bool *b = (bool *) data;
639 *b = true;
640 return true;
641 }
642
643 /* Return true if the formal parameter PARM might have been modified in this
644 function before reaching the statement CALL. PARM_INFO is a pointer to a
645 structure containing intermediate information about PARM. */
646
647 static bool
648 is_parm_modified_before_call (struct param_analysis_info *parm_info,
649 gimple call, tree parm)
650 {
651 bool modified = false;
652 ao_ref refd;
653
654 if (parm_info->modified)
655 return true;
656
657 ao_ref_init (&refd, parm);
658 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
659 &modified, &parm_info->visited_statements);
660 if (modified)
661 {
662 parm_info->modified = true;
663 return true;
664 }
665 return false;
666 }
667
668 /* Go through arguments of the CALL and for every one that looks like a member
669 pointer, check whether it can be safely declared pass-through and if so,
670 mark that to the corresponding item of jump FUNCTIONS. Return true iff
671 there are non-pass-through member pointers within the arguments. INFO
672 describes formal parameters of the caller. PARMS_INFO is a pointer to a
673 vector containing intermediate information about each formal parameter. */
674
675 static bool
676 compute_pass_through_member_ptrs (struct ipa_node_params *info,
677 struct param_analysis_info *parms_info,
678 struct ipa_jump_func *functions,
679 gimple call)
680 {
681 bool undecided_members = false;
682 unsigned num;
683 tree arg;
684
685 for (num = 0; num < gimple_call_num_args (call); num++)
686 {
687 arg = gimple_call_arg (call, num);
688
689 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
690 {
691 if (TREE_CODE (arg) == PARM_DECL)
692 {
693 int index = ipa_get_param_decl_index (info, arg);
694
 695 gcc_assert (index >= 0);
696 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
697 {
698 functions[num].type = IPA_JF_PASS_THROUGH;
699 functions[num].value.pass_through.formal_id = index;
700 functions[num].value.pass_through.operation = NOP_EXPR;
701 }
702 else
703 undecided_members = true;
704 }
705 else
706 undecided_members = true;
707 }
708 }
709
710 return undecided_members;
711 }
712
713 /* Simple function filling in a member pointer constant jump function (with PFN
714 and DELTA as the constant value) into JFUNC. */
715
716 static void
717 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
718 tree pfn, tree delta)
719 {
720 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
721 jfunc->value.member_cst.pfn = pfn;
722 jfunc->value.member_cst.delta = delta;
723 }
724
 725 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
726 return the rhs of its defining statement. */
727
728 static inline tree
729 get_ssa_def_if_simple_copy (tree rhs)
730 {
731 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
732 {
733 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
734
735 if (gimple_assign_single_p (def_stmt))
736 rhs = gimple_assign_rhs1 (def_stmt);
737 else
738 break;
739 }
740 return rhs;
741 }
742
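/* For instance, given the hypothetical sequence of single assignments

     b_3 = a_2(D);
     c_4 = b_3;

   get_ssa_def_if_simple_copy (c_4) follows the copies backwards and returns
   a_2(D); the walk stops at the first defining statement that is not a
   single-rhs assignment, or at a default definition.  */
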
743 /* Traverse statements from CALL backwards, scanning whether the argument ARG
744 which is a member pointer is filled in with constant values. If it is, fill
745 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
746 fields of the record type of the member pointer. To give an example, we
747 look for a pattern looking like the following:
748
749 D.2515.__pfn ={v} printStuff;
750 D.2515.__delta ={v} 0;
751 i_1 = doprinting (D.2515); */
752
753 static void
754 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
755 tree delta_field, struct ipa_jump_func *jfunc)
756 {
757 gimple_stmt_iterator gsi;
758 tree method = NULL_TREE;
759 tree delta = NULL_TREE;
760
761 gsi = gsi_for_stmt (call);
762
763 gsi_prev (&gsi);
764 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
765 {
766 gimple stmt = gsi_stmt (gsi);
767 tree lhs, rhs, fld;
768
769 if (!stmt_may_clobber_ref_p (stmt, arg))
770 continue;
771 if (!gimple_assign_single_p (stmt))
772 return;
773
774 lhs = gimple_assign_lhs (stmt);
775 rhs = gimple_assign_rhs1 (stmt);
776
777 if (TREE_CODE (lhs) != COMPONENT_REF
778 || TREE_OPERAND (lhs, 0) != arg)
779 return;
780
781 fld = TREE_OPERAND (lhs, 1);
782 if (!method && fld == method_field)
783 {
784 rhs = get_ssa_def_if_simple_copy (rhs);
785 if (TREE_CODE (rhs) == ADDR_EXPR
786 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
787 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
788 {
 789 method = rhs;
790 if (delta)
791 {
792 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
793 return;
794 }
795 }
796 else
797 return;
798 }
799
800 if (!delta && fld == delta_field)
801 {
802 rhs = get_ssa_def_if_simple_copy (rhs);
803 if (TREE_CODE (rhs) == INTEGER_CST)
804 {
805 delta = rhs;
806 if (method)
807 {
 808 fill_member_ptr_cst_jump_function (jfunc, method, delta);
809 return;
810 }
811 }
812 else
813 return;
814 }
815 }
816
817 return;
818 }
819
 820 /* Go through the arguments of the CALL and for every member pointer within
 821 them, try to determine whether it is a constant. If it is, create a corresponding
822 constant jump function in FUNCTIONS which is an array of jump functions
823 associated with the call. */
824
825 static void
826 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
827 gimple call)
828 {
829 unsigned num;
830 tree arg, method_field, delta_field;
831
832 for (num = 0; num < gimple_call_num_args (call); num++)
833 {
834 arg = gimple_call_arg (call, num);
835
836 if (functions[num].type == IPA_JF_UNKNOWN
837 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
838 &delta_field))
839 determine_cst_member_ptr (call, arg, method_field, delta_field,
840 &functions[num]);
841 }
842 }
843
 844 /* Compute jump functions for all arguments of callsite CS and insert the
845 information in the jump_functions array in the ipa_edge_args corresponding
846 to this callsite. */
847
848 static void
849 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
850 struct cgraph_edge *cs)
851 {
852 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
853 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
854 gimple call;
855
856 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
857 return;
858 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
859 (ipa_get_cs_argument_count (arguments));
860
861 call = cs->call_stmt;
862 gcc_assert (is_gimple_call (call));
863
864 /* We will deal with constants and SSA scalars first: */
865 compute_scalar_jump_functions (info, arguments->jump_functions, call);
866
867 /* Let's check whether there are any potential member pointers and if so,
868 whether we can determine their functions as pass_through. */
869 if (!compute_pass_through_member_ptrs (info, parms_info,
870 arguments->jump_functions, call))
871 return;
872
873 /* Finally, let's check whether we actually pass a new constant member
874 pointer here... */
875 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
876 }
877
878 /* Compute jump functions for all edges - both direct and indirect - outgoing
879 from NODE. Also count the actual arguments in the process. */
880
881 static void
882 ipa_compute_jump_functions (struct cgraph_node *node,
883 struct param_analysis_info *parms_info)
884 {
885 struct cgraph_edge *cs;
886
887 for (cs = node->callees; cs; cs = cs->next_callee)
888 {
889 /* We do not need to bother analyzing calls to unknown
890 functions unless they may become known during lto/whopr. */
891 if (!cs->callee->analyzed && !flag_lto && !flag_whopr)
892 continue;
893 ipa_count_arguments (cs);
894 /* If the descriptor of the callee is not initialized yet, we have to do
895 it now. */
896 if (cs->callee->analyzed)
897 ipa_initialize_node_params (cs->callee);
898 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
899 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
900 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
901 ipa_compute_jump_functions_for_edge (parms_info, cs);
902 }
903
904 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
905 {
906 ipa_count_arguments (cs);
907 ipa_compute_jump_functions_for_edge (parms_info, cs);
908 }
909 }
910
911 /* If RHS looks like a rhs of a statement loading pfn from a member
912 pointer formal parameter, return the parameter, otherwise return
913 NULL. If USE_DELTA, then we look for a use of the delta field
914 rather than the pfn. */
915
916 static tree
917 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
918 {
919 tree rec, fld;
920 tree ptr_field;
921 tree delta_field;
922
923 if (TREE_CODE (rhs) != COMPONENT_REF)
924 return NULL_TREE;
925
926 rec = TREE_OPERAND (rhs, 0);
927 if (TREE_CODE (rec) != PARM_DECL
928 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
929 return NULL_TREE;
930
931 fld = TREE_OPERAND (rhs, 1);
932 if (use_delta ? (fld == delta_field) : (fld == ptr_field))
933 return rec;
934 else
935 return NULL_TREE;
936 }
937
938 /* If STMT looks like a statement loading a value from a member pointer formal
939 parameter, this function returns that parameter. */
940
941 static tree
942 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
943 {
944 tree rhs;
945
946 if (!gimple_assign_single_p (stmt))
947 return NULL_TREE;
948
949 rhs = gimple_assign_rhs1 (stmt);
950 return ipa_get_member_ptr_load_param (rhs, use_delta);
951 }
952
953 /* Returns true iff T is an SSA_NAME defined by a statement. */
954
955 static bool
956 ipa_is_ssa_with_stmt_def (tree t)
957 {
958 if (TREE_CODE (t) == SSA_NAME
959 && !SSA_NAME_IS_DEFAULT_DEF (t))
960 return true;
961 else
962 return false;
963 }
964
965 /* Find the indirect call graph edge corresponding to STMT and add to it all
966 information necessary to describe a call to a parameter number PARAM_INDEX.
967 NODE is the caller. POLYMORPHIC should be set to true iff the call is a
968 virtual one. */
969
970 static void
971 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
972 bool polymorphic)
973 {
974 struct cgraph_edge *cs;
975
976 cs = cgraph_edge (node, stmt);
977 cs->indirect_info->param_index = param_index;
978 cs->indirect_info->anc_offset = 0;
979 cs->indirect_info->polymorphic = polymorphic;
980 if (polymorphic)
981 {
982 tree otr = gimple_call_fn (stmt);
983 tree type, token = OBJ_TYPE_REF_TOKEN (otr);
984 cs->indirect_info->otr_token = tree_low_cst (token, 1);
985 type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
986 cs->indirect_info->otr_type = type;
987 }
988 }
989
990 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
991 (described by INFO). PARMS_INFO is a pointer to a vector containing
992 intermediate information about each formal parameter. Currently it checks
993 whether the call calls a pointer that is a formal parameter and if so, the
994 parameter is marked with the called flag and an indirect call graph edge
995 describing the call is created. This is very simple for ordinary pointers
996 represented in SSA but not-so-nice when it comes to member pointers. The
997 ugly part of this function does nothing more than trying to match the
998 pattern of such a call. An example of such a pattern is the gimple dump
 999 below; the call is on the last line:
1000
1001 <bb 2>:
1002 f$__delta_5 = f.__delta;
1003 f$__pfn_24 = f.__pfn;
1004
1005 ...
1006
1007 <bb 5>
1008 D.2496_3 = (int) f$__pfn_24;
1009 D.2497_4 = D.2496_3 & 1;
1010 if (D.2497_4 != 0)
1011 goto <bb 3>;
1012 else
1013 goto <bb 4>;
1014
1015 <bb 6>:
1016 D.2500_7 = (unsigned int) f$__delta_5;
1017 D.2501_8 = &S + D.2500_7;
1018 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1019 D.2503_10 = *D.2502_9;
1020 D.2504_12 = f$__pfn_24 + -1;
1021 D.2505_13 = (unsigned int) D.2504_12;
1022 D.2506_14 = D.2503_10 + D.2505_13;
1023 D.2507_15 = *D.2506_14;
1024 iftmp.11_16 = (String:: *) D.2507_15;
1025
1026 <bb 7>:
1027 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1028 D.2500_19 = (unsigned int) f$__delta_5;
1029 D.2508_20 = &S + D.2500_19;
1030 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1031
1032 Such patterns are results of simple calls to a member pointer:
1033
1034 int doprinting (int (MyString::* f)(int) const)
1035 {
1036 MyString S ("somestring");
1037
1038 return (S.*f)(4);
1039 }
1040 */
1041
1042 static void
1043 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1044 struct ipa_node_params *info,
1045 struct param_analysis_info *parms_info,
1046 gimple call, tree target)
1047 {
1048 gimple def;
1049 tree n1, n2;
1050 gimple d1, d2;
1051 tree rec, rec2, cond;
1052 gimple branch;
1053 int index;
1054 basic_block bb, virt_bb, join;
1055
1056 if (SSA_NAME_IS_DEFAULT_DEF (target))
1057 {
1058 tree var = SSA_NAME_VAR (target);
1059 index = ipa_get_param_decl_index (info, var);
1060 if (index >= 0)
1061 ipa_note_param_call (node, index, call, false);
1062 return;
1063 }
1064
1065 /* Now we need to try to match the complex pattern of calling a member
1066 pointer. */
1067
1068 if (!POINTER_TYPE_P (TREE_TYPE (target))
1069 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1070 return;
1071
1072 def = SSA_NAME_DEF_STMT (target);
1073 if (gimple_code (def) != GIMPLE_PHI)
1074 return;
1075
1076 if (gimple_phi_num_args (def) != 2)
1077 return;
1078
1079 /* First, we need to check whether one of these is a load from a member
1080 pointer that is a parameter to this function. */
1081 n1 = PHI_ARG_DEF (def, 0);
1082 n2 = PHI_ARG_DEF (def, 1);
1083 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1084 return;
1085 d1 = SSA_NAME_DEF_STMT (n1);
1086 d2 = SSA_NAME_DEF_STMT (n2);
1087
1088 join = gimple_bb (def);
1089 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1090 {
1091 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1092 return;
1093
1094 bb = EDGE_PRED (join, 0)->src;
1095 virt_bb = gimple_bb (d2);
1096 }
1097 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1098 {
1099 bb = EDGE_PRED (join, 1)->src;
1100 virt_bb = gimple_bb (d1);
1101 }
1102 else
1103 return;
1104
1105 /* Second, we need to check that the basic blocks are laid out in the way
1106 corresponding to the pattern. */
1107
1108 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1109 || single_pred (virt_bb) != bb
1110 || single_succ (virt_bb) != join)
1111 return;
1112
1113 /* Third, let's see that the branching is done depending on the least
1114 significant bit of the pfn. */
1115
1116 branch = last_stmt (bb);
1117 if (!branch || gimple_code (branch) != GIMPLE_COND)
1118 return;
1119
1120 if (gimple_cond_code (branch) != NE_EXPR
1121 || !integer_zerop (gimple_cond_rhs (branch)))
1122 return;
1123
1124 cond = gimple_cond_lhs (branch);
1125 if (!ipa_is_ssa_with_stmt_def (cond))
1126 return;
1127
1128 def = SSA_NAME_DEF_STMT (cond);
1129 if (!is_gimple_assign (def)
1130 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1131 || !integer_onep (gimple_assign_rhs2 (def)))
1132 return;
1133
1134 cond = gimple_assign_rhs1 (def);
1135 if (!ipa_is_ssa_with_stmt_def (cond))
1136 return;
1137
1138 def = SSA_NAME_DEF_STMT (cond);
1139
1140 if (is_gimple_assign (def)
1141 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1142 {
1143 cond = gimple_assign_rhs1 (def);
1144 if (!ipa_is_ssa_with_stmt_def (cond))
1145 return;
1146 def = SSA_NAME_DEF_STMT (cond);
1147 }
1148
1149 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1150 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1151 == ptrmemfunc_vbit_in_delta));
1152
1153 if (rec != rec2)
1154 return;
1155
1156 index = ipa_get_param_decl_index (info, rec);
1157 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1158 call, rec))
1159 ipa_note_param_call (node, index, call, false);
1160
1161 return;
1162 }
1163
1164 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1165 object referenced in the expression is a formal parameter of the caller
1166 (described by INFO), create a call note for the statement. */
1167
1168 static void
1169 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1170 struct ipa_node_params *info, gimple call,
1171 tree target)
1172 {
1173 tree obj = OBJ_TYPE_REF_OBJECT (target);
1174 tree var;
1175 int index;
1176
1177 if (TREE_CODE (obj) == ADDR_EXPR)
1178 {
1179 do
1180 {
1181 obj = TREE_OPERAND (obj, 0);
1182 }
1183 while (TREE_CODE (obj) == COMPONENT_REF);
1184 if (TREE_CODE (obj) != MEM_REF)
1185 return;
1186 obj = TREE_OPERAND (obj, 0);
1187 }
1188
1189 if (TREE_CODE (obj) != SSA_NAME
1190 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1191 return;
1192
1193 var = SSA_NAME_VAR (obj);
1194 index = ipa_get_param_decl_index (info, var);
1195
1196 if (index >= 0)
1197 ipa_note_param_call (node, index, call, true);
1198 }
1199
1200 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1201 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1202 containing intermediate information about each formal parameter. */
1203
1204 static void
1205 ipa_analyze_call_uses (struct cgraph_node *node,
1206 struct ipa_node_params *info,
1207 struct param_analysis_info *parms_info, gimple call)
1208 {
1209 tree target = gimple_call_fn (call);
1210
1211 if (TREE_CODE (target) == SSA_NAME)
1212 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1213 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1214 ipa_analyze_virtual_call_uses (node, info, call, target);
1215 }
1216
1217
1218 /* Analyze the call statement STMT with respect to formal parameters (described
1219 in INFO) of caller given by NODE. Currently it only checks whether formal
1220 parameters are called. PARMS_INFO is a pointer to a vector containing
1221 intermediate information about each formal parameter. */
1222
1223 static void
1224 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1225 struct param_analysis_info *parms_info, gimple stmt)
1226 {
1227 if (is_gimple_call (stmt))
1228 ipa_analyze_call_uses (node, info, parms_info, stmt);
1229 }
1230
 1231 /* Callback of walk_stmt_load_store_addr_ops, used below for loads, stores
 1232 and taken addresses alike. If OP is a parameter declaration, mark it as
 1233 used in the info structure passed in DATA. */
1234
1235 static bool
1236 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1237 tree op, void *data)
1238 {
1239 struct ipa_node_params *info = (struct ipa_node_params *) data;
1240
1241 op = get_base_address (op);
1242 if (op
1243 && TREE_CODE (op) == PARM_DECL)
1244 {
1245 int index = ipa_get_param_decl_index (info, op);
1246 gcc_assert (index >= 0);
1247 info->params[index].used = true;
1248 }
1249
1250 return false;
1251 }
1252
1253 /* Scan the function body of NODE and inspect the uses of formal parameters.
1254 Store the findings in various structures of the associated ipa_node_params
1255 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1256 vector containing intermediate information about each formal parameter. */
1257
1258 static void
1259 ipa_analyze_params_uses (struct cgraph_node *node,
1260 struct param_analysis_info *parms_info)
1261 {
1262 tree decl = node->decl;
1263 basic_block bb;
1264 struct function *func;
1265 gimple_stmt_iterator gsi;
1266 struct ipa_node_params *info = IPA_NODE_REF (node);
1267 int i;
1268
1269 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1270 return;
1271
1272 for (i = 0; i < ipa_get_param_count (info); i++)
1273 {
1274 tree parm = ipa_get_param (info, i);
1275 /* For SSA regs see if parameter is used. For non-SSA we compute
1276 the flag during modification analysis. */
1277 if (is_gimple_reg (parm)
1278 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1279 info->params[i].used = true;
1280 }
1281
1282 func = DECL_STRUCT_FUNCTION (decl);
1283 FOR_EACH_BB_FN (bb, func)
1284 {
1285 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1286 {
1287 gimple stmt = gsi_stmt (gsi);
1288
1289 if (is_gimple_debug (stmt))
1290 continue;
1291
1292 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1293 walk_stmt_load_store_addr_ops (stmt, info,
1294 visit_ref_for_mod_analysis,
1295 visit_ref_for_mod_analysis,
1296 visit_ref_for_mod_analysis);
1297 }
1298 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1299 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1300 visit_ref_for_mod_analysis,
1301 visit_ref_for_mod_analysis,
1302 visit_ref_for_mod_analysis);
1303 }
1304
1305 info->uses_analysis_done = 1;
1306 }
1307
 1308 /* Initialize the array describing properties of formal parameters of NODE,
 1309 analyze their uses and compute jump functions associated with actual
 1310 arguments of calls from within NODE. */
1311
1312 void
1313 ipa_analyze_node (struct cgraph_node *node)
1314 {
1315 struct ipa_node_params *info = IPA_NODE_REF (node);
1316 struct param_analysis_info *parms_info;
1317 int i, param_count;
1318
1319 ipa_initialize_node_params (node);
1320
1321 param_count = ipa_get_param_count (info);
1322 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1323 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1324
1325 ipa_analyze_params_uses (node, parms_info);
1326 ipa_compute_jump_functions (node, parms_info);
1327
1328 for (i = 0; i < param_count; i++)
1329 if (parms_info[i].visited_statements)
1330 BITMAP_FREE (parms_info[i].visited_statements);
1331 }
1332
1333
 1334 /* Update the jump function DST when the call graph edge corresponding to SRC
 1335 is being inlined, knowing that DST is of type ancestor and SRC of known
 1336 type. */
1337
1338 static void
1339 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1340 struct ipa_jump_func *dst)
1341 {
1342 tree new_binfo;
1343
1344 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1345 dst->value.ancestor.offset,
1346 dst->value.ancestor.type);
1347 if (new_binfo)
1348 {
1349 dst->type = IPA_JF_KNOWN_TYPE;
1350 dst->value.base_binfo = new_binfo;
1351 }
1352 else
1353 dst->type = IPA_JF_UNKNOWN;
1354 }
1355
1356 /* Update the jump functions associated with call graph edge E when the call
1357 graph edge CS is being inlined, assuming that E->caller is already (possibly
1358 indirectly) inlined into CS->callee and that E has not been inlined. */
1359
1360 static void
1361 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1362 struct cgraph_edge *e)
1363 {
1364 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1365 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1366 int count = ipa_get_cs_argument_count (args);
1367 int i;
1368
1369 for (i = 0; i < count; i++)
1370 {
1371 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1372
1373 if (dst->type == IPA_JF_ANCESTOR)
1374 {
1375 struct ipa_jump_func *src;
1376
1377 /* Variable number of arguments can cause havoc if we try to access
1378 one that does not exist in the inlined edge. So make sure we
1379 don't. */
1380 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1381 {
1382 dst->type = IPA_JF_UNKNOWN;
1383 continue;
1384 }
1385
1386 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1387 if (src->type == IPA_JF_KNOWN_TYPE)
1388 combine_known_type_and_ancestor_jfs (src, dst);
1389 else if (src->type == IPA_JF_CONST)
1390 {
1391 struct ipa_jump_func kt_func;
1392
1393 kt_func.type = IPA_JF_UNKNOWN;
1394 compute_known_type_jump_func (src->value.constant, &kt_func);
1395 if (kt_func.type == IPA_JF_KNOWN_TYPE)
1396 combine_known_type_and_ancestor_jfs (&kt_func, dst);
1397 else
1398 dst->type = IPA_JF_UNKNOWN;
1399 }
1400 else if (src->type == IPA_JF_PASS_THROUGH
1401 && src->value.pass_through.operation == NOP_EXPR)
1402 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1403 else if (src->type == IPA_JF_ANCESTOR)
1404 {
1405 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1406 dst->value.ancestor.offset += src->value.ancestor.offset;
1407 }
1408 else
1409 dst->type = IPA_JF_UNKNOWN;
1410 }
1411 else if (dst->type == IPA_JF_PASS_THROUGH)
1412 {
1413 struct ipa_jump_func *src;
1414 /* We must check range due to calls with variable number of arguments
1415 and we cannot combine jump functions with operations. */
1416 if (dst->value.pass_through.operation == NOP_EXPR
1417 && (dst->value.pass_through.formal_id
1418 < ipa_get_cs_argument_count (top)))
1419 {
1420 src = ipa_get_ith_jump_func (top,
1421 dst->value.pass_through.formal_id);
1422 *dst = *src;
1423 }
1424 else
1425 dst->type = IPA_JF_UNKNOWN;
1426 }
1427 }
1428 }
1429
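/* To make the compositions above concrete: if the jump function of E is an
   ANCESTOR with offset 64 and the corresponding argument of the inlined edge
   CS was a plain NOP_EXPR pass-through of formal parameter 2, the result is
   an ANCESTOR jump function with formal_id 2 and offset 64 relative to the
   outer caller; two stacked ANCESTOR functions simply add their offsets.
   The numbers are made up for illustration only.  */
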
1430 /* If TARGET is an addr_expr of a function declaration, make it the destination
1431 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1432
1433 static struct cgraph_edge *
1434 make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
1435 {
1436 struct cgraph_node *callee;
1437
1438 if (TREE_CODE (target) != ADDR_EXPR)
1439 return NULL;
1440 target = TREE_OPERAND (target, 0);
1441 if (TREE_CODE (target) != FUNCTION_DECL)
1442 return NULL;
1443 callee = cgraph_node (target);
1444 if (!callee)
1445 return NULL;
1446
1447 cgraph_make_edge_direct (ie, callee);
1448 if (dump_file)
1449 {
1450 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1451 "(%s/%i -> %s/%i) for stmt ",
1452 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1453 cgraph_node_name (ie->caller), ie->caller->uid,
1454 cgraph_node_name (ie->callee), ie->callee->uid);
1455
1456 if (ie->call_stmt)
1457 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1458 else
1459 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1460 }
1461
1462 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1463 != ipa_get_param_count (IPA_NODE_REF (callee)))
1464 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1465
1466 return ie;
1467 }
1468
1469 /* Try to find a destination for indirect edge IE that corresponds to a simple
1470 call or a call of a member function pointer and where the destination is a
1471 pointer formal parameter described by jump function JFUNC. If it can be
1472 determined, return the newly direct edge, otherwise return NULL. */
1473
1474 static struct cgraph_edge *
1475 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1476 struct ipa_jump_func *jfunc)
1477 {
1478 tree target;
1479
1480 if (jfunc->type == IPA_JF_CONST)
1481 target = jfunc->value.constant;
1482 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1483 target = jfunc->value.member_cst.pfn;
1484 else
1485 return NULL;
1486
1487 return make_edge_direct_to_target (ie, target);
1488 }
1489
1490 /* Try to find a destination for indirect edge IE that corresponds to a
 1491 virtual call based on a formal parameter which is described by jump
1492 function JFUNC and if it can be determined, make it direct and return the
1493 direct edge. Otherwise, return NULL. */
1494
1495 static struct cgraph_edge *
1496 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1497 struct ipa_jump_func *jfunc)
1498 {
1499 tree binfo, type, target;
1500 HOST_WIDE_INT token;
1501
1502 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1503 binfo = jfunc->value.base_binfo;
1504 else if (jfunc->type == IPA_JF_CONST)
1505 {
1506 tree cst = jfunc->value.constant;
1507 if (TREE_CODE (cst) == ADDR_EXPR)
1508 binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (cst, 0),
1509 NULL_TREE);
1510 else
1511 return NULL;
1512 }
1513 else
1514 return NULL;
1515
1516 if (!binfo)
1517 return NULL;
1518
1519 token = ie->indirect_info->otr_token;
1520 type = ie->indirect_info->otr_type;
1521 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1522 if (binfo)
1523 target = gimple_fold_obj_type_ref_known_binfo (token, binfo);
1524 else
1525 return NULL;
1526
1527 if (target)
1528 return make_edge_direct_to_target (ie, target);
1529 else
1530 return NULL;
1531 }
1532
1533 /* Update the param called notes associated with NODE when CS is being inlined,
1534 assuming NODE is (potentially indirectly) inlined into CS->callee.
1535 Moreover, if the callee is discovered to be constant, create a new cgraph
1536 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1537 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1538
1539 static bool
1540 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1541 struct cgraph_node *node,
1542 VEC (cgraph_edge_p, heap) **new_edges)
1543 {
1544 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1545 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1546 bool res = false;
1547
1548 ipa_check_create_edge_args ();
1549
1550 for (ie = node->indirect_calls; ie; ie = next_ie)
1551 {
1552 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1553 struct ipa_jump_func *jfunc;
1554
1555 next_ie = ie->next_callee;
1556 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1557 continue;
1558
1559 /* If we ever use indirect edges for anything other than indirect
1560 inlining, we will need to skip those with negative param_indices. */
1561 if (ici->param_index == -1)
1562 continue;
1563
1564 /* We must check range due to calls with variable number of arguments: */
1565 if (ici->param_index >= ipa_get_cs_argument_count (top))
1566 {
1567 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1568 continue;
1569 }
1570
1571 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1572 if (jfunc->type == IPA_JF_PASS_THROUGH
1573 && jfunc->value.pass_through.operation == NOP_EXPR)
1574 ici->param_index = jfunc->value.pass_through.formal_id;
1575 else if (jfunc->type == IPA_JF_ANCESTOR)
1576 {
1577 ici->param_index = jfunc->value.ancestor.formal_id;
1578 ici->anc_offset += jfunc->value.ancestor.offset;
1579 }
1580 else
1581 /* Either we can find a destination for this edge now or never. */
1582 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1583
1584 if (ici->polymorphic)
1585 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1586 else
1587 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1588
1589 if (new_direct_edge)
1590 {
1591 new_direct_edge->indirect_inlining_edge = 1;
1592 if (new_edges)
1593 {
1594 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1595 new_direct_edge);
1596 top = IPA_EDGE_REF (cs);
1597 res = true;
1598 }
1599 }
1600 }
1601
1602 return res;
1603 }
1604
1605 /* Recursively traverse subtree of NODE (including node) made of inlined
1606 cgraph_edges when CS has been inlined and invoke
1607 update_indirect_edges_after_inlining on all nodes and
1608 update_jump_functions_after_inlining on all non-inlined edges that lead out
1609 of this subtree. Newly discovered indirect edges will be added to
1610 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1611 created. */
1612
1613 static bool
1614 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1615 struct cgraph_node *node,
1616 VEC (cgraph_edge_p, heap) **new_edges)
1617 {
1618 struct cgraph_edge *e;
1619 bool res;
1620
1621 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1622
1623 for (e = node->callees; e; e = e->next_callee)
1624 if (!e->inline_failed)
1625 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1626 else
1627 update_jump_functions_after_inlining (cs, e);
1628
1629 return res;
1630 }
1631
1632 /* Update jump functions and call note functions on inlining the call site CS.
1633 CS is expected to lead to a node already cloned by
1634 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
 1635 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1636 created. */
1637
1638 bool
1639 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1640 VEC (cgraph_edge_p, heap) **new_edges)
1641 {
1642 /* FIXME lto: We do not stream out indirect call information. */
1643 if (flag_wpa)
1644 return false;
1645
1646 /* Do nothing if the preparation phase has not been carried out yet
1647 (i.e. during early inlining). */
1648 if (!ipa_node_params_vector)
1649 return false;
1650 gcc_assert (ipa_edge_args_vector);
1651
1652 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1653 }
1654
1655 /* Frees all dynamically allocated structures that the argument info points
1656 to. */
1657
1658 void
1659 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1660 {
1661 if (args->jump_functions)
1662 ggc_free (args->jump_functions);
1663
1664 memset (args, 0, sizeof (*args));
1665 }
1666
1667 /* Free all ipa_edge structures. */
1668
1669 void
1670 ipa_free_all_edge_args (void)
1671 {
1672 int i;
1673 struct ipa_edge_args *args;
1674
1675 for (i = 0;
1676 VEC_iterate (ipa_edge_args_t, ipa_edge_args_vector, i, args);
1677 i++)
1678 ipa_free_edge_args_substructures (args);
1679
1680 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1681 ipa_edge_args_vector = NULL;
1682 }
1683
1684 /* Frees all dynamically allocated structures that the param info points
1685 to. */
1686
1687 void
1688 ipa_free_node_params_substructures (struct ipa_node_params *info)
1689 {
1690 if (info->params)
1691 free (info->params);
1692
1693 memset (info, 0, sizeof (*info));
1694 }
1695
1696 /* Free all ipa_node_params structures. */
1697
1698 void
1699 ipa_free_all_node_params (void)
1700 {
1701 int i;
1702 struct ipa_node_params *info;
1703
1704 for (i = 0;
1705 VEC_iterate (ipa_node_params_t, ipa_node_params_vector, i, info);
1706 i++)
1707 ipa_free_node_params_substructures (info);
1708
1709 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1710 ipa_node_params_vector = NULL;
1711 }
1712
1713 /* Hook that is called by cgraph.c when an edge is removed. */
1714
1715 static void
1716 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1717 {
 1718 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1719 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1720 <= (unsigned)cs->uid)
1721 return;
1722 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1723 }
1724
1725 /* Hook that is called by cgraph.c when a node is removed. */
1726
1727 static void
1728 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1729 {
 1730 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1731 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1732 <= (unsigned)node->uid)
1733 return;
1734 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1735 }
1736
 1737 /* Helper function to duplicate an array of N bytes starting at SRC and return
 1738 a pointer to the copy. Return NULL if SRC is NULL. */
1739
1740 static void *
1741 duplicate_array (void *src, size_t n)
1742 {
1743 void *p;
1744
1745 if (!src)
1746 return NULL;
1747
1748 p = xmalloc (n);
1749 memcpy (p, src, n);
1750 return p;
1751 }
1752
1753 static struct ipa_jump_func *
1754 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
1755 {
1756 struct ipa_jump_func *p;
1757
1758 if (!src)
1759 return NULL;
1760
1761 p = ggc_alloc_vec_ipa_jump_func (n);
1762 memcpy (p, src, n * sizeof (struct ipa_jump_func));
1763 return p;
1764 }
1765
 1766 /* Hook that is called by cgraph.c when an edge is duplicated. */
1767
1768 static void
1769 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1770 __attribute__((unused)) void *data)
1771 {
1772 struct ipa_edge_args *old_args, *new_args;
1773 int arg_count;
1774
1775 ipa_check_create_edge_args ();
1776
1777 old_args = IPA_EDGE_REF (src);
1778 new_args = IPA_EDGE_REF (dst);
1779
1780 arg_count = ipa_get_cs_argument_count (old_args);
1781 ipa_set_cs_argument_count (new_args, arg_count);
1782 new_args->jump_functions =
1783 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
1784
1785 if (iinlining_processed_edges
1786 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1787 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1788 }
1789
1790 /* Hook that is called by cgraph.c when a node is duplicated. */
1791
1792 static void
1793 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1794 __attribute__((unused)) void *data)
1795 {
1796 struct ipa_node_params *old_info, *new_info;
1797 int param_count;
1798
1799 ipa_check_create_node_params ();
1800 old_info = IPA_NODE_REF (src);
1801 new_info = IPA_NODE_REF (dst);
1802 param_count = ipa_get_param_count (old_info);
1803
1804 ipa_set_param_count (new_info, param_count);
1805 new_info->params = (struct ipa_param_descriptor *)
1806 duplicate_array (old_info->params,
1807 sizeof (struct ipa_param_descriptor) * param_count);
1808 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
1809 new_info->count_scale = old_info->count_scale;
1810 }
1811
1812 /* Register our cgraph hooks if they are not already there. */
1813
1814 void
1815 ipa_register_cgraph_hooks (void)
1816 {
1817 if (!edge_removal_hook_holder)
1818 edge_removal_hook_holder =
1819 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1820 if (!node_removal_hook_holder)
1821 node_removal_hook_holder =
1822 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1823 if (!edge_duplication_hook_holder)
1824 edge_duplication_hook_holder =
1825 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
1826 if (!node_duplication_hook_holder)
1827 node_duplication_hook_holder =
1828 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
1829 }
1830
 1831 /* Unregister our cgraph hooks. */
1832
1833 static void
1834 ipa_unregister_cgraph_hooks (void)
1835 {
1836 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
1837 edge_removal_hook_holder = NULL;
1838 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1839 node_removal_hook_holder = NULL;
1840 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
1841 edge_duplication_hook_holder = NULL;
1842 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1843 node_duplication_hook_holder = NULL;
1844 }
1845
 1846 /* Allocate all data structures necessary for indirect inlining. */
1847
1848 void
1849 ipa_create_all_structures_for_iinln (void)
1850 {
1851 iinlining_processed_edges = BITMAP_ALLOC (NULL);
1852 }
1853
1854 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1855 longer needed after ipa-cp. */
1856
1857 void
1858 ipa_free_all_structures_after_ipa_cp (void)
1859 {
1860 if (!flag_indirect_inlining)
1861 {
1862 ipa_free_all_edge_args ();
1863 ipa_free_all_node_params ();
1864 ipa_unregister_cgraph_hooks ();
1865 }
1866 }
1867
1868 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1869 longer needed after indirect inlining. */
1870
1871 void
1872 ipa_free_all_structures_after_iinln (void)
1873 {
1874 BITMAP_FREE (iinlining_processed_edges);
1875
1876 ipa_free_all_edge_args ();
1877 ipa_free_all_node_params ();
1878 ipa_unregister_cgraph_hooks ();
1879 }
1880
1881 /* Print the parameter descriptors (ipa_param_descriptor structures) of
1882 function NODE to file F. */
1883
1884 void
1885 ipa_print_node_params (FILE * f, struct cgraph_node *node)
1886 {
1887 int i, count;
1888 tree temp;
1889 struct ipa_node_params *info;
1890
1891 if (!node->analyzed)
1892 return;
1893 info = IPA_NODE_REF (node);
1894 fprintf (f, " function %s parameter descriptors:\n",
1895 cgraph_node_name (node));
1896 count = ipa_get_param_count (info);
1897 for (i = 0; i < count; i++)
1898 {
1899 temp = ipa_get_param (info, i);
1900 if (TREE_CODE (temp) == PARM_DECL)
1901 fprintf (f, " param %d : %s", i,
1902 (DECL_NAME (temp)
1903 ? (*lang_hooks.decl_printable_name) (temp, 2)
1904 : "(unnamed)"));
1905 if (ipa_is_param_used (info, i))
1906 fprintf (f, " used");
1907 fprintf (f, "\n");
1908 }
1909 }
1910
1911 /* Print the parameter descriptors of all analyzed functions in the call
1912 graph to file F. */
1913
1914 void
1915 ipa_print_all_params (FILE * f)
1916 {
1917 struct cgraph_node *node;
1918
1919 fprintf (f, "\nFunction parameters:\n");
1920 for (node = cgraph_nodes; node; node = node->next)
1921 ipa_print_node_params (f, node);
1922 }
1923
1924 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
1925
1926 VEC(tree, heap) *
1927 ipa_get_vector_of_formal_parms (tree fndecl)
1928 {
1929 VEC(tree, heap) *args;
1930 int count;
1931 tree parm;
1932
1933 count = count_formal_params_1 (fndecl);
1934 args = VEC_alloc (tree, heap, count);
1935 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
1936 VEC_quick_push (tree, args, parm);
1937
1938 return args;
1939 }
1940
1941 /* Return a heap-allocated vector containing the types of the formal
1942 parameters of function type FNTYPE. */
1943
1944 static inline VEC(tree, heap) *
1945 get_vector_of_formal_parm_types (tree fntype)
1946 {
1947 VEC(tree, heap) *types;
1948 int count = 0;
1949 tree t;
1950
1951 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1952 count++;
1953
1954 types = VEC_alloc (tree, heap, count);
1955 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1956 VEC_quick_push (tree, types, TREE_VALUE (t));
1957
1958 return types;
1959 }
1960
1961 /* Modify the function declaration FNDECL and its type according to the plan
1962 in ADJUSTMENTS. Also set the base field of each individual adjustment
1963 structure to the actual parameter being modified, as determined by its
1964 base_index field. */
1965
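/* For instance (a purely illustrative sketch, not taken from any particular
   testcase): for a function taking (int i, struct s *p), an adjustment vector
   whose first entry copies parameter 0 and whose second entry describes a
   scalar component of parameter 1 (copy_param and remove_param both clear,
   by_ref clear, type int) turns the declaration into one taking two ints,
   the second being a new synthetic parameter named after SYNTH_PARM_PREFIX.  */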
1966 void
1967 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
1968 const char *synth_parm_prefix)
1969 {
1970 VEC(tree, heap) *oparms, *otypes;
1971 tree orig_type, new_type = NULL;
1972 tree old_arg_types, t, new_arg_types = NULL;
1973 tree parm, *link = &DECL_ARGUMENTS (fndecl);
1974 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
1975 tree new_reversed = NULL;
1976 bool care_for_types, last_parm_void;
1977
1978 if (!synth_parm_prefix)
1979 synth_parm_prefix = "SYNTH";
1980
1981 oparms = ipa_get_vector_of_formal_parms (fndecl);
1982 orig_type = TREE_TYPE (fndecl);
1983 old_arg_types = TYPE_ARG_TYPES (orig_type);
1984
1985 /* The following test is an ugly hack; some functions simply do not have
1986 any arguments in their type. This is probably a bug but well... */
1987 care_for_types = (old_arg_types != NULL_TREE);
1988 if (care_for_types)
1989 {
1990 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
1991 == void_type_node);
1992 otypes = get_vector_of_formal_parm_types (orig_type);
1993 if (last_parm_void)
1994 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
1995 else
1996 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
1997 }
1998 else
1999 {
2000 last_parm_void = false;
2001 otypes = NULL;
2002 }
2003
2004 for (i = 0; i < len; i++)
2005 {
2006 struct ipa_parm_adjustment *adj;
2007 gcc_assert (link);
2008
2009 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2010 parm = VEC_index (tree, oparms, adj->base_index);
2011 adj->base = parm;
2012
2013 if (adj->copy_param)
2014 {
2015 if (care_for_types)
2016 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2017 adj->base_index),
2018 new_arg_types);
2019 *link = parm;
2020 link = &DECL_CHAIN (parm);
2021 }
2022 else if (!adj->remove_param)
2023 {
2024 tree new_parm;
2025 tree ptype;
2026
2027 if (adj->by_ref)
2028 ptype = build_pointer_type (adj->type);
2029 else
2030 ptype = adj->type;
2031
2032 if (care_for_types)
2033 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2034
2035 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2036 ptype);
2037 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2038
2039 DECL_ARTIFICIAL (new_parm) = 1;
2040 DECL_ARG_TYPE (new_parm) = ptype;
2041 DECL_CONTEXT (new_parm) = fndecl;
2042 TREE_USED (new_parm) = 1;
2043 DECL_IGNORED_P (new_parm) = 1;
2044 layout_decl (new_parm, 0);
2045
2046 add_referenced_var (new_parm);
2047 mark_sym_for_renaming (new_parm);
2048 adj->base = parm;
2049 adj->reduction = new_parm;
2050
2051 *link = new_parm;
2052
2053 link = &DECL_CHAIN (new_parm);
2054 }
2055 }
2056
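  /* Terminate the rebuilt chain of DECL_ARGUMENTS.  */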
2057 *link = NULL_TREE;
2058
2059 if (care_for_types)
2060 {
2061 new_reversed = nreverse (new_arg_types);
2062 if (last_parm_void)
2063 {
2064 if (new_reversed)
2065 TREE_CHAIN (new_arg_types) = void_list_node;
2066 else
2067 new_reversed = void_list_node;
2068 }
2069 }
2070
2071 /* Use copy_node to preserve as much as possible from the original type
2072 (debug info, attribute lists etc.).
2073 The exception is METHOD_TYPEs, which must have a THIS argument; when we
2074 are asked to remove it, we need to build a new FUNCTION_TYPE
2075 instead. */
2076 if (TREE_CODE (orig_type) != METHOD_TYPE
2077 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2078 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2079 {
2080 new_type = build_distinct_type_copy (orig_type);
2081 TYPE_ARG_TYPES (new_type) = new_reversed;
2082 }
2083 else
2084 {
2085 new_type
2086 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2087 new_reversed));
2088 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2089 DECL_VINDEX (fndecl) = NULL_TREE;
2090 }
2091
2092 /* When the signature changes, we need to clear builtin info. */
2093 if (DECL_BUILT_IN (fndecl))
2094 {
2095 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2096 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2097 }
2098
2099 /* This is a new type, not a copy of an old type. Need to reassociate
2100 variants. We can handle everything except the main variant lazily. */
2101 t = TYPE_MAIN_VARIANT (orig_type);
2102 if (orig_type != t)
2103 {
2104 TYPE_MAIN_VARIANT (new_type) = t;
2105 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2106 TYPE_NEXT_VARIANT (t) = new_type;
2107 }
2108 else
2109 {
2110 TYPE_MAIN_VARIANT (new_type) = new_type;
2111 TYPE_NEXT_VARIANT (new_type) = NULL;
2112 }
2113
2114 TREE_TYPE (fndecl) = new_type;
2115 if (otypes)
2116 VEC_free (tree, heap, otypes);
2117 VEC_free (tree, heap, oparms);
2118 }
2119
2120 /* Modify actual arguments of the function call in statement STMT as indicated
2121 in ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
2122 otherwise it must contain the corresponding call graph edge. */
2123
2124 void
2125 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2126 ipa_parm_adjustment_vec adjustments)
2127 {
2128 VEC(tree, heap) *vargs;
2129 gimple new_stmt;
2130 gimple_stmt_iterator gsi;
2131 tree callee_decl;
2132 int i, len;
2133
2134 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2135 vargs = VEC_alloc (tree, heap, len);
2136
2137 gsi = gsi_for_stmt (stmt);
2138 for (i = 0; i < len; i++)
2139 {
2140 struct ipa_parm_adjustment *adj;
2141
2142 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2143
2144 if (adj->copy_param)
2145 {
2146 tree arg = gimple_call_arg (stmt, adj->base_index);
2147
2148 VEC_quick_push (tree, vargs, arg);
2149 }
2150 else if (!adj->remove_param)
2151 {
2152 tree expr, orig_expr;
2153 bool allow_ptr, repl_found;
2154
2155 orig_expr = expr = gimple_call_arg (stmt, adj->base_index);
2156 if (TREE_CODE (expr) == ADDR_EXPR)
2157 {
2158 allow_ptr = false;
2159 expr = TREE_OPERAND (expr, 0);
2160 }
2161 else
2162 allow_ptr = true;
2163
2164 repl_found = build_ref_for_offset (&expr, TREE_TYPE (expr),
2165 adj->offset, adj->type,
2166 allow_ptr);
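	  /* If a component of the right type exists at the required offset,
	     use it directly (or its address when the new parameter is passed
	     by reference).  Otherwise fall back to explicit pointer
	     arithmetic on the original argument below.  */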
2167 if (repl_found)
2168 {
2169 if (adj->by_ref)
2170 expr = build_fold_addr_expr (expr);
2171 }
2172 else
2173 {
2174 tree ptrtype = build_pointer_type (adj->type);
2175 expr = orig_expr;
2176 if (!POINTER_TYPE_P (TREE_TYPE (expr)))
2177 expr = build_fold_addr_expr (expr);
2178 if (!useless_type_conversion_p (ptrtype, TREE_TYPE (expr)))
2179 expr = fold_convert (ptrtype, expr);
2180 expr = fold_build2 (POINTER_PLUS_EXPR, ptrtype, expr,
2181 build_int_cst (sizetype,
2182 adj->offset / BITS_PER_UNIT));
2183 if (!adj->by_ref)
2184 expr = fold_build1 (INDIRECT_REF, adj->type, expr);
2185 }
2186 expr = force_gimple_operand_gsi (&gsi, expr,
2187 adj->by_ref
2188 || is_gimple_reg_type (adj->type),
2189 NULL, true, GSI_SAME_STMT);
2190 VEC_quick_push (tree, vargs, expr);
2191 }
2192 }
2193
2194 if (dump_file && (dump_flags & TDF_DETAILS))
2195 {
2196 fprintf (dump_file, "replacing stmt:");
2197 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2198 }
2199
2200 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2201 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2202 VEC_free (tree, heap, vargs);
2203 if (gimple_call_lhs (stmt))
2204 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2205
2206 gimple_set_block (new_stmt, gimple_block (stmt));
2207 if (gimple_has_location (stmt))
2208 gimple_set_location (new_stmt, gimple_location (stmt));
2209 gimple_call_copy_flags (new_stmt, stmt);
2210 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2211
2212 if (dump_file && (dump_flags & TDF_DETAILS))
2213 {
2214 fprintf (dump_file, "with stmt:");
2215 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2216 fprintf (dump_file, "\n");
2217 }
2218 gsi_replace (&gsi, new_stmt, true);
2219 if (cs)
2220 cgraph_set_call_stmt (cs, new_stmt);
2221 update_ssa (TODO_update_ssa);
2222 free_dominance_info (CDI_DOMINATORS);
2223 }
2224
2225 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2226
2227 static bool
2228 index_in_adjustments_multiple_times_p (int base_index,
2229 ipa_parm_adjustment_vec adjustments)
2230 {
2231 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2232 bool one = false;
2233
2234 for (i = 0; i < len; i++)
2235 {
2236 struct ipa_parm_adjustment *adj;
2237 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2238
2239 if (adj->base_index == base_index)
2240 {
2241 if (one)
2242 return true;
2243 else
2244 one = true;
2245 }
2246 }
2247 return false;
2248 }
2249
2250
2251 /* Return adjustments that should have the same effect on function parameters
2252 and call arguments as if they were first changed according to adjustments in
2253 INNER and then by adjustments in OUTER. */
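/* A small illustrative example (an assumption for exposition, not a real
   testcase): if INNER keeps parameter 0 unchanged and replaces parameter 1
   by a component at bit offset 32, while OUTER removes its first parameter
   and keeps its second one, the combined vector removes original parameter 0
   and keeps only the offset-32 component of original parameter 1.  */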
2254
2255 ipa_parm_adjustment_vec
2256 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2257 ipa_parm_adjustment_vec outer)
2258 {
2259 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2260 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2261 int removals = 0;
2262 ipa_parm_adjustment_vec adjustments, tmp;
2263
2264 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2265 for (i = 0; i < inlen; i++)
2266 {
2267 struct ipa_parm_adjustment *n;
2268 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2269
2270 if (n->remove_param)
2271 removals++;
2272 else
2273 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2274 }
2275
2276 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2277 for (i = 0; i < outlen; i++)
2278 {
2279 struct ipa_parm_adjustment *r;
2280 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2281 outer, i);
2282 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2283 out->base_index);
2284
2285 gcc_assert (!in->remove_param);
2286 if (out->remove_param)
2287 {
2288 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2289 {
2290 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2291 memset (r, 0, sizeof (*r));
2292 r->remove_param = true;
2293 }
2294 continue;
2295 }
2296
2297 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2298 memset (r, 0, sizeof (*r));
2299 r->base_index = in->base_index;
2300 r->type = out->type;
2301
2302 /* FIXME: Create nonlocal value too. */
2303
2304 if (in->copy_param && out->copy_param)
2305 r->copy_param = true;
2306 else if (in->copy_param)
2307 r->offset = out->offset;
2308 else if (out->copy_param)
2309 r->offset = in->offset;
2310 else
2311 r->offset = in->offset + out->offset;
2312 }
2313
2314 for (i = 0; i < inlen; i++)
2315 {
2316 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2317 inner, i);
2318
2319 if (n->remove_param)
2320 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2321 }
2322
2323 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2324 return adjustments;
2325 }
2326
2327 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
2328 way, assuming they are meant to be applied to FNDECL. */
2329
2330 void
2331 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2332 tree fndecl)
2333 {
2334 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2335 bool first = true;
2336 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2337
2338 fprintf (file, "IPA param adjustments: ");
2339 for (i = 0; i < len; i++)
2340 {
2341 struct ipa_parm_adjustment *adj;
2342 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2343
2344 if (!first)
2345 fprintf (file, " ");
2346 else
2347 first = false;
2348
2349 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2350 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2351 if (adj->base)
2352 {
2353 fprintf (file, ", base: ");
2354 print_generic_expr (file, adj->base, 0);
2355 }
2356 if (adj->reduction)
2357 {
2358 fprintf (file, ", reduction: ");
2359 print_generic_expr (file, adj->reduction, 0);
2360 }
2361 if (adj->new_ssa_base)
2362 {
2363 fprintf (file, ", new_ssa_base: ");
2364 print_generic_expr (file, adj->new_ssa_base, 0);
2365 }
2366
2367 if (adj->copy_param)
2368 fprintf (file, ", copy_param");
2369 else if (adj->remove_param)
2370 fprintf (file, ", remove_param");
2371 else
2372 fprintf (file, ", offset %li", (long) adj->offset);
2373 if (adj->by_ref)
2374 fprintf (file, ", by_ref");
2375 print_node_brief (file, ", type: ", adj->type, 0);
2376 fprintf (file, "\n");
2377 }
2378 VEC_free (tree, heap, parms);
2379 }
2380
2381 /* Stream out jump function JUMP_FUNC to OB. */
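/* Note that the encoding below must be kept in sync with
   ipa_read_jump_function.  */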
2382
2383 static void
2384 ipa_write_jump_function (struct output_block *ob,
2385 struct ipa_jump_func *jump_func)
2386 {
2387 lto_output_uleb128_stream (ob->main_stream,
2388 jump_func->type);
2389
2390 switch (jump_func->type)
2391 {
2392 case IPA_JF_UNKNOWN:
2393 break;
2394 case IPA_JF_KNOWN_TYPE:
2395 lto_output_tree (ob, jump_func->value.base_binfo, true);
2396 break;
2397 case IPA_JF_CONST:
2398 lto_output_tree (ob, jump_func->value.constant, true);
2399 break;
2400 case IPA_JF_PASS_THROUGH:
2401 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2402 lto_output_uleb128_stream (ob->main_stream,
2403 jump_func->value.pass_through.formal_id);
2404 lto_output_uleb128_stream (ob->main_stream,
2405 jump_func->value.pass_through.operation);
2406 break;
2407 case IPA_JF_ANCESTOR:
2408 lto_output_uleb128_stream (ob->main_stream,
2409 jump_func->value.ancestor.offset);
2410 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2411 lto_output_uleb128_stream (ob->main_stream,
2412 jump_func->value.ancestor.formal_id);
2413 break;
2414 case IPA_JF_CONST_MEMBER_PTR:
2415 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2416 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2417 break;
2418 }
2419 }
2420
2421 /* Read in jump function JUMP_FUNC from IB. */
2422
2423 static void
2424 ipa_read_jump_function (struct lto_input_block *ib,
2425 struct ipa_jump_func *jump_func,
2426 struct data_in *data_in)
2427 {
2428 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2429
2430 switch (jump_func->type)
2431 {
2432 case IPA_JF_UNKNOWN:
2433 break;
2434 case IPA_JF_KNOWN_TYPE:
2435 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
2436 break;
2437 case IPA_JF_CONST:
2438 jump_func->value.constant = lto_input_tree (ib, data_in);
2439 break;
2440 case IPA_JF_PASS_THROUGH:
2441 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2442 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2443 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2444 break;
2445 case IPA_JF_ANCESTOR:
2446 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2447 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2448 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2449 break;
2450 case IPA_JF_CONST_MEMBER_PTR:
2451 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2452 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2453 break;
2454 }
2455 }
2456
2457 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2458 relevant to indirect inlining to OB. */
2459
2460 static void
2461 ipa_write_indirect_edge_info (struct output_block *ob,
2462 struct cgraph_edge *cs)
2463 {
2464 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465 struct bitpack_d bp;
2466
2467 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2468 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
2469 bp = bitpack_create (ob->main_stream);
2470 bp_pack_value (&bp, ii->polymorphic, 1);
2471 lto_output_bitpack (&bp);
2472
2473 if (ii->polymorphic)
2474 {
2475 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2476 lto_output_tree (ob, ii->otr_type, true);
2477 }
2478 }
2479
2480 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2481 relevant to indirect inlining from IB. */
2482
2483 static void
2484 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2485 struct data_in *data_in ATTRIBUTE_UNUSED,
2486 struct cgraph_edge *cs)
2487 {
2488 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2489 struct bitpack_d bp;
2490
2491 ii->param_index = (int) lto_input_sleb128 (ib);
2492 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2493 bp = lto_input_bitpack (ib);
2494 ii->polymorphic = bp_unpack_value (&bp, 1);
2495 if (ii->polymorphic)
2496 {
2497 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2498 ii->otr_type = lto_input_tree (ib, data_in);
2499 }
2500 }
2501
2502 /* Stream out NODE info to OB. */
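/* The layout written here must stay in sync with ipa_read_node_info: a
   reference to the node, a bitpack holding called_with_var_arguments and one
   "used" bit per parameter, then for every callee edge its argument count
   followed by the jump functions, and finally the indirect edge info.  (The
   node reference itself is consumed by ipa_prop_read_section.)  */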
2503
2504 static void
2505 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2506 {
2507 int node_ref;
2508 lto_cgraph_encoder_t encoder;
2509 struct ipa_node_params *info = IPA_NODE_REF (node);
2510 int j;
2511 struct cgraph_edge *e;
2512 struct bitpack_d bp;
2513
2514 encoder = ob->decl_state->cgraph_node_encoder;
2515 node_ref = lto_cgraph_encoder_encode (encoder, node);
2516 lto_output_uleb128_stream (ob->main_stream, node_ref);
2517
2518 bp = bitpack_create (ob->main_stream);
2519 bp_pack_value (&bp, info->called_with_var_arguments, 1);
2520 gcc_assert (info->uses_analysis_done
2521 || ipa_get_param_count (info) == 0);
2522 gcc_assert (!info->node_enqueued);
2523 gcc_assert (!info->ipcp_orig_node);
2524 for (j = 0; j < ipa_get_param_count (info); j++)
2525 bp_pack_value (&bp, info->params[j].used, 1);
2526 lto_output_bitpack (&bp);
2527 for (e = node->callees; e; e = e->next_callee)
2528 {
2529 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2530
2531 lto_output_uleb128_stream (ob->main_stream,
2532 ipa_get_cs_argument_count (args));
2533 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2534 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2535 }
2536 for (e = node->indirect_calls; e; e = e->next_callee)
2537 ipa_write_indirect_edge_info (ob, e);
2538 }
2539
2540 /* Stream in NODE info from IB. */
2541
2542 static void
2543 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2544 struct data_in *data_in)
2545 {
2546 struct ipa_node_params *info = IPA_NODE_REF (node);
2547 int k;
2548 struct cgraph_edge *e;
2549 struct bitpack_d bp;
2550
2551 ipa_initialize_node_params (node);
2552
2553 bp = lto_input_bitpack (ib);
2554 info->called_with_var_arguments = bp_unpack_value (&bp, 1);
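  /* The writer asserts that parameter uses have been analyzed for every node
     with a non-zero parameter count, so restore the flag accordingly.  */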
2555 if (ipa_get_param_count (info) != 0)
2556 info->uses_analysis_done = true;
2557 info->node_enqueued = false;
2558 for (k = 0; k < ipa_get_param_count (info); k++)
2559 info->params[k].used = bp_unpack_value (&bp, 1);
2560 for (e = node->callees; e; e = e->next_callee)
2561 {
2562 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2563 int count = lto_input_uleb128 (ib);
2564
2565 ipa_set_cs_argument_count (args, count);
2566 if (!count)
2567 continue;
2568
2569 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2570 (ipa_get_cs_argument_count (args));
2571 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2572 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2573 }
2574 for (e = node->indirect_calls; e; e = e->next_callee)
2575 ipa_read_indirect_edge_info (ib, data_in, e);
2576 }
2577
2578 /* Write jump functions for nodes in SET. */
2579
2580 void
2581 ipa_prop_write_jump_functions (cgraph_node_set set)
2582 {
2583 struct cgraph_node *node;
2584 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2585 unsigned int count = 0;
2586 cgraph_node_set_iterator csi;
2587
2588 ob->cgraph_node = NULL;
2589
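  /* First count the nodes for which we have computed information so that the
     reader knows how many records to expect.  */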
2590 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2591 {
2592 node = csi_node (csi);
2593 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2594 count++;
2595 }
2596
2597 lto_output_uleb128_stream (ob->main_stream, count);
2598
2599 /* Process all of the functions. */
2600 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2601 {
2602 node = csi_node (csi);
2603 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2604 ipa_write_node_info (ob, node);
2605 }
2606 lto_output_1_stream (ob->main_stream, 0);
2607 produce_asm (ob, NULL);
2608 destroy_output_block (ob);
2609 }
2610
2611 /* Read the jump function section in file FILE_DATA of length LEN with data DATA. */
2612
2613 static void
2614 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
2615 size_t len)
2616 {
2617 const struct lto_function_header *header =
2618 (const struct lto_function_header *) data;
2619 const int32_t cfg_offset = sizeof (struct lto_function_header);
2620 const int32_t main_offset = cfg_offset + header->cfg_size;
2621 const int32_t string_offset = main_offset + header->main_size;
2622 struct data_in *data_in;
2623 struct lto_input_block ib_main;
2624 unsigned int i;
2625 unsigned int count;
2626
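  /* The section data consist of an lto_function_header followed by a
     (possibly empty) CFG part, the main stream holding the records written
     by ipa_prop_write_jump_functions, and finally the string table.  */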
2627 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2628 header->main_size);
2629
2630 data_in =
2631 lto_data_in_create (file_data, (const char *) data + string_offset,
2632 header->string_size, NULL);
2633 count = lto_input_uleb128 (&ib_main);
2634
2635 for (i = 0; i < count; i++)
2636 {
2637 unsigned int index;
2638 struct cgraph_node *node;
2639 lto_cgraph_encoder_t encoder;
2640
2641 index = lto_input_uleb128 (&ib_main);
2642 encoder = file_data->cgraph_node_encoder;
2643 node = lto_cgraph_encoder_deref (encoder, index);
2644 gcc_assert (node->analyzed);
2645 ipa_read_node_info (&ib_main, node, data_in);
2646 }
2647 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2648 len);
2649 lto_data_in_delete (data_in);
2650 }
2651
2652 /* Read ipcp jump functions. */
2653
2654 void
2655 ipa_prop_read_jump_functions (void)
2656 {
2657 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2658 struct lto_file_decl_data *file_data;
2659 unsigned int j = 0;
2660
2661 ipa_check_create_node_params ();
2662 ipa_check_create_edge_args ();
2663 ipa_register_cgraph_hooks ();
2664
2665 while ((file_data = file_data_vec[j++]))
2666 {
2667 size_t len;
2668 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2669
2670 if (data)
2671 ipa_prop_read_section (file_data, data, len);
2672 }
2673 }
2674
2675 /* After merging units, we can get a mismatch in argument counts.
2676 Also, decl merging might have rendered parameter lists obsolete.
2677 Also compute the called_with_var_arguments info. */
2678
2679 void
2680 ipa_update_after_lto_read (void)
2681 {
2682 struct cgraph_node *node;
2683 struct cgraph_edge *cs;
2684
2685 ipa_check_create_node_params ();
2686 ipa_check_create_edge_args ();
2687
2688 for (node = cgraph_nodes; node; node = node->next)
2689 if (node->analyzed)
2690 ipa_initialize_node_params (node);
2691
2692 for (node = cgraph_nodes; node; node = node->next)
2693 if (node->analyzed)
2694 for (cs = node->callees; cs; cs = cs->next_callee)
2695 {
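	  /* If the number of recorded jump functions does not match the
	     callee's parameter count after merging, conservatively treat the
	     callee as called with variable arguments.  */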
2696 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2697 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2698 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
2699 }
2700 }