* jvspec.c (jvgenmain_spec): Don't handle -fnew-verifier.
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41
42
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  /* Set once the parameter is known to have been written to before a call
     statement examined so far (cached so we never re-walk the vdefs).  */
  bool modified;
  /* Statements already visited by walk_aliased_vdefs when checking for
     modifications of this parameter, to avoid re-examining them.  */
  bitmap visited_statements;
};
51
/* Vector where the parameter infos are actually stored.  */
VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the argument descriptions of call sites are actually
   stored, indexed by call graph edge uid.  */
VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;

/* Bitmap with all UIDs of call graph edges that have been already processed
   by indirect inlining.  */
static bitmap iinlining_processed_edges;

/* Holders of ipa cgraph hooks, so the hooks can be unregistered when this
   pass's summaries are freed: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
66
67 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
68 it is in one or not. It should almost never be used directly, as opposed to
69 ipa_push_func_to_list. */
70
71 void
72 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
73 struct cgraph_node *node,
74 struct ipa_node_params *info)
75 {
76 struct ipa_func_list *temp;
77
78 info->node_enqueued = 1;
79 temp = XCNEW (struct ipa_func_list);
80 temp->node = node;
81 temp->next = *wl;
82 *wl = temp;
83 }
84
85 /* Initialize worklist to contain all functions. */
86
87 struct ipa_func_list *
88 ipa_init_func_list (void)
89 {
90 struct cgraph_node *node;
91 struct ipa_func_list * wl;
92
93 wl = NULL;
94 for (node = cgraph_nodes; node; node = node->next)
95 if (node->analyzed)
96 {
97 struct ipa_node_params *info = IPA_NODE_REF (node);
98 /* Unreachable nodes should have been eliminated before ipcp and
99 inlining. */
100 gcc_assert (node->needed || node->reachable);
101 ipa_push_func_to_list_1 (&wl, node, info);
102 }
103
104 return wl;
105 }
106
107 /* Remove a function from the worklist WL and return it. */
108
109 struct cgraph_node *
110 ipa_pop_func_from_list (struct ipa_func_list **wl)
111 {
112 struct ipa_node_params *info;
113 struct ipa_func_list *first;
114 struct cgraph_node *node;
115
116 first = *wl;
117 *wl = (*wl)->next;
118 node = first->node;
119 free (first);
120
121 info = IPA_NODE_REF (node);
122 info->node_enqueued = 0;
123 return node;
124 }
125
126 /* Return index of the formal whose tree is PTREE in function which corresponds
127 to INFO. */
128
129 static int
130 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
131 {
132 int i, count;
133
134 count = ipa_get_param_count (info);
135 for (i = 0; i < count; i++)
136 if (ipa_get_param(info, i) == ptree)
137 return i;
138
139 return -1;
140 }
141
142 /* Populate the param_decl field in parameter descriptors of INFO that
143 corresponds to NODE. */
144
145 static void
146 ipa_populate_param_decls (struct cgraph_node *node,
147 struct ipa_node_params *info)
148 {
149 tree fndecl;
150 tree fnargs;
151 tree parm;
152 int param_num;
153
154 fndecl = node->decl;
155 fnargs = DECL_ARGUMENTS (fndecl);
156 param_num = 0;
157 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
158 {
159 info->params[param_num].decl = parm;
160 param_num++;
161 }
162 }
163
164 /* Return how many formal parameters FNDECL has. */
165
166 static inline int
167 count_formal_params_1 (tree fndecl)
168 {
169 tree parm;
170 int count = 0;
171
172 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
173 count++;
174
175 return count;
176 }
177
178 /* Count number of formal parameters in NOTE. Store the result to the
179 appropriate field of INFO. */
180
181 static void
182 ipa_count_formal_params (struct cgraph_node *node,
183 struct ipa_node_params *info)
184 {
185 int param_num;
186
187 param_num = count_formal_params_1 (node->decl);
188 ipa_set_param_count (info, param_num);
189 }
190
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
193 param_decls. */
194
195 void
196 ipa_initialize_node_params (struct cgraph_node *node)
197 {
198 struct ipa_node_params *info = IPA_NODE_REF (node);
199
200 if (!info->params)
201 {
202 ipa_count_formal_params (node, info);
203 info->params = XCNEWVEC (struct ipa_param_descriptor,
204 ipa_get_param_count (info));
205 ipa_populate_param_decls (node, info);
206 }
207 }
208
209 /* Count number of arguments callsite CS has and store it in
210 ipa_edge_args structure corresponding to this callsite. */
211
212 static void
213 ipa_count_arguments (struct cgraph_edge *cs)
214 {
215 gimple stmt;
216 int arg_num;
217
218 stmt = cs->call_stmt;
219 gcc_assert (is_gimple_call (stmt));
220 arg_num = gimple_call_num_args (stmt);
221 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
222 <= (unsigned) cgraph_edge_max_uid)
223 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
224 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
225 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
226 }
227
228 /* Print the jump functions associated with call graph edge CS to file F. */
229
230 static void
231 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
232 {
233 int i, count;
234
235 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
236 for (i = 0; i < count; i++)
237 {
238 struct ipa_jump_func *jump_func;
239 enum jump_func_type type;
240
241 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
242 type = jump_func->type;
243
244 fprintf (f, " param %d: ", i);
245 if (type == IPA_JF_UNKNOWN)
246 fprintf (f, "UNKNOWN\n");
247 else if (type == IPA_JF_KNOWN_TYPE)
248 {
249 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
250 fprintf (f, "KNOWN TYPE, type in binfo is: ");
251 print_generic_expr (f, binfo_type, 0);
252 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
253 }
254 else if (type == IPA_JF_CONST)
255 {
256 tree val = jump_func->value.constant;
257 fprintf (f, "CONST: ");
258 print_generic_expr (f, val, 0);
259 if (TREE_CODE (val) == ADDR_EXPR
260 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
261 {
262 fprintf (f, " -> ");
263 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
264 0);
265 }
266 fprintf (f, "\n");
267 }
268 else if (type == IPA_JF_CONST_MEMBER_PTR)
269 {
270 fprintf (f, "CONST MEMBER PTR: ");
271 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
272 fprintf (f, ", ");
273 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
274 fprintf (f, "\n");
275 }
276 else if (type == IPA_JF_PASS_THROUGH)
277 {
278 fprintf (f, "PASS THROUGH: ");
279 fprintf (f, "%d, op %s ",
280 jump_func->value.pass_through.formal_id,
281 tree_code_name[(int)
282 jump_func->value.pass_through.operation]);
283 if (jump_func->value.pass_through.operation != NOP_EXPR)
284 print_generic_expr (dump_file,
285 jump_func->value.pass_through.operand, 0);
286 fprintf (dump_file, "\n");
287 }
288 else if (type == IPA_JF_ANCESTOR)
289 {
290 fprintf (f, "ANCESTOR: ");
291 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
292 jump_func->value.ancestor.formal_id,
293 jump_func->value.ancestor.offset);
294 print_generic_expr (f, jump_func->value.ancestor.type, 0);
295 fprintf (dump_file, "\n");
296 }
297 }
298 }
299
300
301 /* Print the jump functions of all arguments on all call graph edges going from
302 NODE to file F. */
303
304 void
305 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
306 {
307 struct cgraph_edge *cs;
308 int i;
309
310 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
311 for (cs = node->callees; cs; cs = cs->next_callee)
312 {
313 if (!ipa_edge_args_info_available_for_edge_p (cs))
314 continue;
315
316 fprintf (f, " callsite %s/%i -> %s/%i : \n",
317 cgraph_node_name (node), node->uid,
318 cgraph_node_name (cs->callee), cs->callee->uid);
319 ipa_print_node_jump_functions_for_edge (f, cs);
320 }
321
322 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
323 {
324 if (!ipa_edge_args_info_available_for_edge_p (cs))
325 continue;
326
327 if (cs->call_stmt)
328 {
329 fprintf (f, " indirect callsite %d for stmt ", i);
330 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
331 }
332 else
333 fprintf (f, " indirect callsite %d :\n", i);
334 ipa_print_node_jump_functions_for_edge (f, cs);
335
336 }
337 }
338
339 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
340
341 void
342 ipa_print_all_jump_functions (FILE *f)
343 {
344 struct cgraph_node *node;
345
346 fprintf (f, "\nJump functions:\n");
347 for (node = cgraph_nodes; node; node = node->next)
348 {
349 ipa_print_node_jump_functions (f, node);
350 }
351 }
352
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to find out whether NAME can be
   described by a (possibly polynomial) pass-through jump-function or an
   ancestor jump function and if so, write the appropriate function into
   JFUNC  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, op2, type;
  int index;

  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  /* Case 1: the first operand is the default definition of a formal
     parameter, i.e. its unmodified incoming value -> try to build a
     pass-through jump function.  */
  if (TREE_CODE (op1) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (op1))
    {
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      if (index < 0)
	return;

      if (op2)
	{
	  /* Binary operation: the second operand must be an interprocedural
	     invariant, and unless the statement is a comparison the result
	     type must be convertible from the parameter's type without
	     change.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
	  jfunc->value.pass_through.operand = op2;
	}
      else if (gimple_assign_unary_nop_p (stmt))
	{
	  /* A plain copy or no-op conversion of the parameter.  */
	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = NOP_EXPR;
	}
      return;
    }

  /* Case 2: the right-hand side takes the address of a component within a
     record accessed through a parameter -> try an ancestor jump function.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;

  op1 = TREE_OPERAND (op1, 0);
  type = TREE_TYPE (op1);
  if (TREE_CODE (type) != RECORD_TYPE)
    return;
  op1 = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (op1) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  /* Fold the MEM_REF's own constant offset into the bit offset returned by
     get_ref_base_and_extent.  */
  offset += mem_ref_offset (op1).low * BITS_PER_UNIT;
  op1 = TREE_OPERAND (op1, 0);
  /* The base must again be the unmodified value of a formal parameter.  */
  if (TREE_CODE (op1) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (op1))
    return;

  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
  if (index >= 0)
    {
      jfunc->type = IPA_JF_ANCESTOR;
      jfunc->value.ancestor.formal_id = index;
      jfunc->value.ancestor.offset = offset;
      jfunc->value.ancestor.type = type;
    }
}
428
429
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
   goto <bb 3>;
   else
   goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple phi)
{
  HOST_WIDE_INT offset, size, max_size;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr;
  int index, i;

  /* The PHI must merge exactly two values, the second being the literal
     zero (the NULL pointer from the "else" arm of the pattern).  */
  if (gimple_phi_num_args (phi) != 2
      || !integer_zerop (PHI_ARG_DEF (phi, 1)))
    return;

  /* The first PHI argument must be an SSA name defined by a statement, and
     must be a pointer to a record (the "&obj->D.xxxx" temporary).  */
  tmp = PHI_ARG_DEF (phi, 0);
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb)
      || !gimple_assign_single_p (assign))
    return;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return;
  expr = TREE_OPERAND (expr, 0);
  expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  /* Fold the MEM_REF's constant offset into the component's bit offset.  */
  offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  parm = TREE_OPERAND (expr, 0);
  /* The base must be the unmodified incoming value of a formal parameter.  */
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm))
    return;

  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The address computation must be guarded by a test that the very same
     parameter is not NULL.  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both predecessors of the PHI's block must be the guard block and the
     address-computation block -- no other path may reach the PHI.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = index;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = TREE_TYPE (TREE_TYPE (tmp));
}
523
524 /* Given OP whch is passed as an actual argument to a called function,
525 determine if it is possible to construct a KNOWN_TYPE jump function for it
526 and if so, create one and store it to JFUNC. */
527
528 static void
529 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc)
530 {
531 tree binfo;
532
533 if (TREE_CODE (op) != ADDR_EXPR)
534 return;
535
536 op = TREE_OPERAND (op, 0);
537 binfo = gimple_get_relevant_ref_binfo (op, NULL_TREE);
538 if (binfo)
539 {
540 jfunc->type = IPA_JF_KNOWN_TYPE;
541 jfunc->value.base_binfo = binfo;
542 }
543 }
544
545
546 /* Determine the jump functions of scalar arguments. Scalar means SSA names
547 and constants of a number of selected types. INFO is the ipa_node_params
548 structure associated with the caller, FUNCTIONS is a pointer to an array of
549 jump function structures associated with CALL which is the call statement
550 being examined.*/
551
552 static void
553 compute_scalar_jump_functions (struct ipa_node_params *info,
554 struct ipa_jump_func *functions,
555 gimple call)
556 {
557 tree arg;
558 unsigned num = 0;
559
560 for (num = 0; num < gimple_call_num_args (call); num++)
561 {
562 arg = gimple_call_arg (call, num);
563
564 if (is_gimple_ip_invariant (arg))
565 {
566 functions[num].type = IPA_JF_CONST;
567 functions[num].value.constant = arg;
568 }
569 else if (TREE_CODE (arg) == SSA_NAME)
570 {
571 if (SSA_NAME_IS_DEFAULT_DEF (arg))
572 {
573 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
574
575 if (index >= 0)
576 {
577 functions[num].type = IPA_JF_PASS_THROUGH;
578 functions[num].value.pass_through.formal_id = index;
579 functions[num].value.pass_through.operation = NOP_EXPR;
580 }
581 }
582 else
583 {
584 gimple stmt = SSA_NAME_DEF_STMT (arg);
585 if (is_gimple_assign (stmt))
586 compute_complex_assign_jump_func (info, &functions[num],
587 stmt, arg);
588 else if (gimple_code (stmt) == GIMPLE_PHI)
589 compute_complex_ancestor_jump_func (info, &functions[num],
590 stmt);
591 }
592 }
593 else
594 compute_known_type_jump_func (arg, &functions[num]);
595 }
596 }
597
598 /* Inspect the given TYPE and return true iff it has the same structure (the
599 same number of fields of the same types) as a C++ member pointer. If
600 METHOD_PTR and DELTA are non-NULL, store the trees representing the
601 corresponding fields there. */
602
603 static bool
604 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
605 {
606 tree fld;
607
608 if (TREE_CODE (type) != RECORD_TYPE)
609 return false;
610
611 fld = TYPE_FIELDS (type);
612 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
613 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
614 return false;
615
616 if (method_ptr)
617 *method_ptr = fld;
618
619 fld = DECL_CHAIN (fld);
620 if (!fld || INTEGRAL_TYPE_P (fld))
621 return false;
622 if (delta)
623 *delta = fld;
624
625 if (DECL_CHAIN (fld))
626 return false;
627
628 return true;
629 }
630
631 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
632 boolean variable pointed to by DATA. */
633
634 static bool
635 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
636 void *data)
637 {
638 bool *b = (bool *) data;
639 *b = true;
640 return true;
641 }
642
643 /* Return true if the formal parameter PARM might have been modified in this
644 function before reaching the statement CALL. PARM_INFO is a pointer to a
645 structure containing intermediate information about PARM. */
646
647 static bool
648 is_parm_modified_before_call (struct param_analysis_info *parm_info,
649 gimple call, tree parm)
650 {
651 bool modified = false;
652 ao_ref refd;
653
654 if (parm_info->modified)
655 return true;
656
657 ao_ref_init (&refd, parm);
658 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
659 &modified, &parm_info->visited_statements);
660 if (modified)
661 {
662 parm_info->modified = true;
663 return true;
664 }
665 return false;
666 }
667
668 /* Go through arguments of the CALL and for every one that looks like a member
669 pointer, check whether it can be safely declared pass-through and if so,
670 mark that to the corresponding item of jump FUNCTIONS. Return true iff
671 there are non-pass-through member pointers within the arguments. INFO
672 describes formal parameters of the caller. PARMS_INFO is a pointer to a
673 vector containing intermediate information about each formal parameter. */
674
675 static bool
676 compute_pass_through_member_ptrs (struct ipa_node_params *info,
677 struct param_analysis_info *parms_info,
678 struct ipa_jump_func *functions,
679 gimple call)
680 {
681 bool undecided_members = false;
682 unsigned num;
683 tree arg;
684
685 for (num = 0; num < gimple_call_num_args (call); num++)
686 {
687 arg = gimple_call_arg (call, num);
688
689 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
690 {
691 if (TREE_CODE (arg) == PARM_DECL)
692 {
693 int index = ipa_get_param_decl_index (info, arg);
694
695 gcc_assert (index >=0);
696 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
697 {
698 functions[num].type = IPA_JF_PASS_THROUGH;
699 functions[num].value.pass_through.formal_id = index;
700 functions[num].value.pass_through.operation = NOP_EXPR;
701 }
702 else
703 undecided_members = true;
704 }
705 else
706 undecided_members = true;
707 }
708 }
709
710 return undecided_members;
711 }
712
713 /* Simple function filling in a member pointer constant jump function (with PFN
714 and DELTA as the constant value) into JFUNC. */
715
716 static void
717 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
718 tree pfn, tree delta)
719 {
720 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
721 jfunc->value.member_cst.pfn = pfn;
722 jfunc->value.member_cst.delta = delta;
723 }
724
725 /* If RHS is an SSA_NAMe and it is defined by a simple copy assign statement,
726 return the rhs of its defining statement. */
727
728 static inline tree
729 get_ssa_def_if_simple_copy (tree rhs)
730 {
731 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
732 {
733 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
734
735 if (gimple_assign_single_p (def_stmt))
736 rhs = gimple_assign_rhs1 (def_stmt);
737 else
738 break;
739 }
740 return rhs;
741 }
742
743 /* Traverse statements from CALL backwards, scanning whether the argument ARG
744 which is a member pointer is filled in with constant values. If it is, fill
745 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
746 fields of the record type of the member pointer. To give an example, we
747 look for a pattern looking like the following:
748
749 D.2515.__pfn ={v} printStuff;
750 D.2515.__delta ={v} 0;
751 i_1 = doprinting (D.2515); */
752
753 static void
754 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
755 tree delta_field, struct ipa_jump_func *jfunc)
756 {
757 gimple_stmt_iterator gsi;
758 tree method = NULL_TREE;
759 tree delta = NULL_TREE;
760
761 gsi = gsi_for_stmt (call);
762
763 gsi_prev (&gsi);
764 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
765 {
766 gimple stmt = gsi_stmt (gsi);
767 tree lhs, rhs, fld;
768
769 if (!stmt_may_clobber_ref_p (stmt, arg))
770 continue;
771 if (!gimple_assign_single_p (stmt))
772 return;
773
774 lhs = gimple_assign_lhs (stmt);
775 rhs = gimple_assign_rhs1 (stmt);
776
777 if (TREE_CODE (lhs) != COMPONENT_REF
778 || TREE_OPERAND (lhs, 0) != arg)
779 return;
780
781 fld = TREE_OPERAND (lhs, 1);
782 if (!method && fld == method_field)
783 {
784 rhs = get_ssa_def_if_simple_copy (rhs);
785 if (TREE_CODE (rhs) == ADDR_EXPR
786 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
787 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
788 {
789 method = TREE_OPERAND (rhs, 0);
790 if (delta)
791 {
792 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
793 return;
794 }
795 }
796 else
797 return;
798 }
799
800 if (!delta && fld == delta_field)
801 {
802 rhs = get_ssa_def_if_simple_copy (rhs);
803 if (TREE_CODE (rhs) == INTEGER_CST)
804 {
805 delta = rhs;
806 if (method)
807 {
808 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
809 return;
810 }
811 }
812 else
813 return;
814 }
815 }
816
817 return;
818 }
819
820 /* Go through the arguments of the CALL and for every member pointer within
821 tries determine whether it is a constant. If it is, create a corresponding
822 constant jump function in FUNCTIONS which is an array of jump functions
823 associated with the call. */
824
825 static void
826 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
827 gimple call)
828 {
829 unsigned num;
830 tree arg, method_field, delta_field;
831
832 for (num = 0; num < gimple_call_num_args (call); num++)
833 {
834 arg = gimple_call_arg (call, num);
835
836 if (functions[num].type == IPA_JF_UNKNOWN
837 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
838 &delta_field))
839 determine_cst_member_ptr (call, arg, method_field, delta_field,
840 &functions[num]);
841 }
842 }
843
844 /* Compute jump function for all arguments of callsite CS and insert the
845 information in the jump_functions array in the ipa_edge_args corresponding
846 to this callsite. */
847
848 static void
849 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
850 struct cgraph_edge *cs)
851 {
852 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
853 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
854 gimple call;
855
856 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
857 return;
858 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
859 (ipa_get_cs_argument_count (arguments));
860
861 call = cs->call_stmt;
862 gcc_assert (is_gimple_call (call));
863
864 /* We will deal with constants and SSA scalars first: */
865 compute_scalar_jump_functions (info, arguments->jump_functions, call);
866
867 /* Let's check whether there are any potential member pointers and if so,
868 whether we can determine their functions as pass_through. */
869 if (!compute_pass_through_member_ptrs (info, parms_info,
870 arguments->jump_functions, call))
871 return;
872
873 /* Finally, let's check whether we actually pass a new constant member
874 pointer here... */
875 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
876 }
877
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
			    struct param_analysis_info *parms_info)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      /* We do not need to bother analyzing calls to unknown
	 functions unless they may become known during lto/whopr.  */
      if (!cs->callee->analyzed && !flag_lto && !flag_whopr)
	continue;
      /* Record the argument count first; the edge-args vector entry must
	 exist before jump functions can be stored for this edge.  */
      ipa_count_arguments (cs);
      /* If the descriptor of the callee is not initialized yet, we have to do
	 it now.  */
      if (cs->callee->analyzed)
	ipa_initialize_node_params (cs->callee);
      /* A mismatch between actual and formal counts means the callee is
	 (also) called with a differing number of arguments.  */
      if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
	  != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
	ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
      ipa_compute_jump_functions_for_edge (parms_info, cs);
    }

  /* Indirect edges have no callee descriptor to initialize or check.  */
  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_count_arguments (cs);
      ipa_compute_jump_functions_for_edge (parms_info, cs);
    }
}
910
911 /* If RHS looks like a rhs of a statement loading pfn from a member
912 pointer formal parameter, return the parameter, otherwise return
913 NULL. If USE_DELTA, then we look for a use of the delta field
914 rather than the pfn. */
915
916 static tree
917 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
918 {
919 tree rec, fld;
920 tree ptr_field;
921 tree delta_field;
922
923 if (TREE_CODE (rhs) != COMPONENT_REF)
924 return NULL_TREE;
925
926 rec = TREE_OPERAND (rhs, 0);
927 if (TREE_CODE (rec) != PARM_DECL
928 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
929 return NULL_TREE;
930
931 fld = TREE_OPERAND (rhs, 1);
932 if (use_delta ? (fld == delta_field) : (fld == ptr_field))
933 return rec;
934 else
935 return NULL_TREE;
936 }
937
938 /* If STMT looks like a statement loading a value from a member pointer formal
939 parameter, this function returns that parameter. */
940
941 static tree
942 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
943 {
944 tree rhs;
945
946 if (!gimple_assign_single_p (stmt))
947 return NULL_TREE;
948
949 rhs = gimple_assign_rhs1 (stmt);
950 return ipa_get_member_ptr_load_param (rhs, use_delta);
951 }
952
953 /* Returns true iff T is an SSA_NAME defined by a statement. */
954
955 static bool
956 ipa_is_ssa_with_stmt_def (tree t)
957 {
958 if (TREE_CODE (t) == SSA_NAME
959 && !SSA_NAME_IS_DEFAULT_DEF (t))
960 return true;
961 else
962 return false;
963 }
964
965 /* Find the indirect call graph edge corresponding to STMT and add to it all
966 information necessary to describe a call to a parameter number PARAM_INDEX.
967 NODE is the caller. POLYMORPHIC should be set to true iff the call is a
968 virtual one. */
969
970 static void
971 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
972 bool polymorphic)
973 {
974 struct cgraph_edge *cs;
975
976 cs = cgraph_edge (node, stmt);
977 cs->indirect_info->param_index = param_index;
978 cs->indirect_info->anc_offset = 0;
979 cs->indirect_info->polymorphic = polymorphic;
980 if (polymorphic)
981 {
982 tree otr = gimple_call_fn (stmt);
983 tree type, token = OBJ_TYPE_REF_TOKEN (otr);
984 cs->indirect_info->otr_token = tree_low_cst (token, 1);
985 type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
986 cs->indirect_info->otr_type = type;
987 }
988 }
989
990 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
991 (described by INFO). PARMS_INFO is a pointer to a vector containing
992 intermediate information about each formal parameter. Currently it checks
993 whether the call calls a pointer that is a formal parameter and if so, the
994 parameter is marked with the called flag and an indirect call graph edge
995 describing the call is created. This is very simple for ordinary pointers
996 represented in SSA but not-so-nice when it comes to member pointers. The
997 ugly part of this function does nothing more than trying to match the
998 pattern of such a call. An example of such a pattern is the gimple dump
999 below, the call is on the last line:
1000
1001 <bb 2>:
1002 f$__delta_5 = f.__delta;
1003 f$__pfn_24 = f.__pfn;
1004
1005 ...
1006
1007 <bb 5>
1008 D.2496_3 = (int) f$__pfn_24;
1009 D.2497_4 = D.2496_3 & 1;
1010 if (D.2497_4 != 0)
1011 goto <bb 3>;
1012 else
1013 goto <bb 4>;
1014
1015 <bb 6>:
1016 D.2500_7 = (unsigned int) f$__delta_5;
1017 D.2501_8 = &S + D.2500_7;
1018 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1019 D.2503_10 = *D.2502_9;
1020 D.2504_12 = f$__pfn_24 + -1;
1021 D.2505_13 = (unsigned int) D.2504_12;
1022 D.2506_14 = D.2503_10 + D.2505_13;
1023 D.2507_15 = *D.2506_14;
1024 iftmp.11_16 = (String:: *) D.2507_15;
1025
1026 <bb 7>:
1027 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1028 D.2500_19 = (unsigned int) f$__delta_5;
1029 D.2508_20 = &S + D.2500_19;
1030 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1031
1032 Such patterns are results of simple calls to a member pointer:
1033
1034 int doprinting (int (MyString::* f)(int) const)
1035 {
1036 MyString S ("somestring");
1037
1038 return (S.*f)(4);
1039 }
1040 */
1041
/* Analyze the statement CALL (made from NODE) whose callee is the SSA name
   TARGET and, if the callee can be traced back to a formal parameter of NODE
   (described by INFO), record the fact with ipa_note_param_call.  Two shapes
   are recognized: a direct call through a parameter's default definition, and
   the member-pointer calling pattern documented in the comment above.
   PARMS_INFO holds the intermediate per-parameter analysis data.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_info,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;

  /* The simple case: the call target is the default definition of a formal
     parameter, i.e. the unmodified parameter itself is called.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call, false);
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */

  if (!POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* The target of the pattern is selected by a two-argument PHI merging the
     virtual and the direct function pointer paths.  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_code (def) != GIMPLE_PHI)
    return;

  if (gimple_phi_num_args (def) != 2)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
    {
      /* Exactly one of the two PHI arguments may be such a load.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if (gimple_cond_code (branch) != NE_EXPR
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The tested value must be the result of masking with constant one.  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over a single conversion of the masked value, if present.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* Depending on the target ABI, the discriminating bit is kept either in
     the pfn or in the delta field of the member pointer.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta));

  /* The branched-on value and the called pfn must come from the very same
     parameter.  */
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
						   call, rec))
    ipa_note_param_call (node, index, call, false);

  return;
}
1163
1164 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1165 object referenced in the expression is a formal parameter of the caller
1166 (described by INFO), create a call note for the statement. */
1167
1168 static void
1169 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1170 struct ipa_node_params *info, gimple call,
1171 tree target)
1172 {
1173 tree obj = OBJ_TYPE_REF_OBJECT (target);
1174 tree var;
1175 int index;
1176
1177 if (TREE_CODE (obj) == ADDR_EXPR)
1178 {
1179 do
1180 {
1181 obj = TREE_OPERAND (obj, 0);
1182 }
1183 while (TREE_CODE (obj) == COMPONENT_REF);
1184 if (TREE_CODE (obj) != MEM_REF)
1185 return;
1186 obj = TREE_OPERAND (obj, 0);
1187 }
1188
1189 if (TREE_CODE (obj) != SSA_NAME
1190 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1191 return;
1192
1193 var = SSA_NAME_VAR (obj);
1194 index = ipa_get_param_decl_index (info, var);
1195
1196 if (index >= 0)
1197 ipa_note_param_call (node, index, call, true);
1198 }
1199
1200 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1201 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1202 containing intermediate information about each formal parameter. */
1203
1204 static void
1205 ipa_analyze_call_uses (struct cgraph_node *node,
1206 struct ipa_node_params *info,
1207 struct param_analysis_info *parms_info, gimple call)
1208 {
1209 tree target = gimple_call_fn (call);
1210
1211 if (TREE_CODE (target) == SSA_NAME)
1212 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1213 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1214 ipa_analyze_virtual_call_uses (node, info, call, target);
1215 }
1216
1217
1218 /* Analyze the call statement STMT with respect to formal parameters (described
1219 in INFO) of caller given by NODE. Currently it only checks whether formal
1220 parameters are called. PARMS_INFO is a pointer to a vector containing
1221 intermediate information about each formal parameter. */
1222
1223 static void
1224 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1225 struct param_analysis_info *parms_info, gimple stmt)
1226 {
1227 if (is_gimple_call (stmt))
1228 ipa_analyze_call_uses (node, info, parms_info, stmt);
1229 }
1230
1231 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1232 If OP is a parameter declaration, mark it as used in the info structure
1233 passed in DATA. */
1234
1235 static bool
1236 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1237 tree op, void *data)
1238 {
1239 struct ipa_node_params *info = (struct ipa_node_params *) data;
1240
1241 op = get_base_address (op);
1242 if (op
1243 && TREE_CODE (op) == PARM_DECL)
1244 {
1245 int index = ipa_get_param_decl_index (info, op);
1246 gcc_assert (index >= 0);
1247 info->params[index].used = true;
1248 }
1249
1250 return false;
1251 }
1252
1253 /* Scan the function body of NODE and inspect the uses of formal parameters.
1254 Store the findings in various structures of the associated ipa_node_params
1255 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1256 vector containing intermediate information about each formal parameter. */
1257
static void
ipa_analyze_params_uses (struct cgraph_node *node,
			 struct param_analysis_info *parms_info)
{
  tree decl = node->decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  /* Nothing to do for parameterless functions or when the analysis has
     already been performed.  */
  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis. */
      if (is_gimple_reg (parm)
	  && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
	info->params[i].used = true;
    }

  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* Debug statements must not influence the analysis result.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  /* Record calls made through parameters...  */
	  ipa_analyze_stmt_uses (node, info, parms_info, stmt);
	  /* ...and mark parameters whose value or address is referenced.  */
	  walk_stmt_load_store_addr_ops (stmt, info,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis);
	}
      /* PHI node arguments can reference parameters too.  */
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis);
    }

  info->uses_analysis_done = 1;
}
1307
/* Initialize the array describing properties of formal parameters of NODE,
   analyze their uses and compute jump functions associated with the actual
   arguments of calls from within NODE.  */
1311
1312 void
1313 ipa_analyze_node (struct cgraph_node *node)
1314 {
1315 struct ipa_node_params *info = IPA_NODE_REF (node);
1316 struct param_analysis_info *parms_info;
1317 int i, param_count;
1318
1319 ipa_initialize_node_params (node);
1320
1321 param_count = ipa_get_param_count (info);
1322 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1323 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1324
1325 ipa_analyze_params_uses (node, parms_info);
1326 ipa_compute_jump_functions (node, parms_info);
1327
1328 for (i = 0; i < param_count; i++)
1329 if (parms_info[i].visited_statements)
1330 BITMAP_FREE (parms_info[i].visited_statements);
1331 }
1332
1333
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and SRC of known
   type.  */
1337
1338 static void
1339 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1340 struct ipa_jump_func *dst)
1341 {
1342 tree new_binfo;
1343
1344 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1345 dst->value.ancestor.offset,
1346 dst->value.ancestor.type);
1347 if (new_binfo)
1348 {
1349 dst->type = IPA_JF_KNOWN_TYPE;
1350 dst->value.base_binfo = new_binfo;
1351 }
1352 else
1353 dst->type = IPA_JF_UNKNOWN;
1354 }
1355
1356 /* Update the jump functions associated with call graph edge E when the call
1357 graph edge CS is being inlined, assuming that E->caller is already (possibly
1358 indirectly) inlined into CS->callee and that E has not been inlined. */
1359
static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't. */
	  if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_CONST)
	    {
	      struct ipa_jump_func kt_func;

	      /* Try to derive a known-type jump function from the constant
		 and, if that succeeds, combine it with the ancestor one.  */
	      kt_func.type = IPA_JF_UNKNOWN;
	      compute_known_type_jump_func (src->value.constant, &kt_func);
	      if (kt_func.type == IPA_JF_KNOWN_TYPE)
		combine_known_type_and_ancestor_jfs (&kt_func, dst);
	      else
		dst->type = IPA_JF_UNKNOWN;
	    }
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    /* A simple pass-through only redirects which formal the ancestor
	       walk starts from.  */
	    dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* Compose the two ancestor walks into a single one.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations. */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      src = ipa_get_ith_jump_func (top,
					   dst->value.pass_through.formal_id);
	      *dst = *src;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
1429
1430 /* If TARGET is an addr_expr of a function declaration, make it the destination
1431 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1432
1433 struct cgraph_edge *
1434 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
1435 {
1436 struct cgraph_node *callee;
1437
1438 if (TREE_CODE (target) != ADDR_EXPR)
1439 return NULL;
1440 target = TREE_OPERAND (target, 0);
1441 if (TREE_CODE (target) != FUNCTION_DECL)
1442 return NULL;
1443 callee = cgraph_node (target);
1444 if (!callee)
1445 return NULL;
1446
1447 cgraph_make_edge_direct (ie, callee);
1448 if (dump_file)
1449 {
1450 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1451 "(%s/%i -> %s/%i) for stmt ",
1452 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1453 cgraph_node_name (ie->caller), ie->caller->uid,
1454 cgraph_node_name (ie->callee), ie->callee->uid);
1455
1456 if (ie->call_stmt)
1457 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1458 else
1459 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1460 }
1461
1462 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1463 != ipa_get_param_count (IPA_NODE_REF (callee)))
1464 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1465
1466 return ie;
1467 }
1468
1469 /* Try to find a destination for indirect edge IE that corresponds to a simple
1470 call or a call of a member function pointer and where the destination is a
1471 pointer formal parameter described by jump function JFUNC. If it can be
1472 determined, return the newly direct edge, otherwise return NULL. */
1473
1474 static struct cgraph_edge *
1475 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1476 struct ipa_jump_func *jfunc)
1477 {
1478 tree target;
1479
1480 if (jfunc->type == IPA_JF_CONST)
1481 target = jfunc->value.constant;
1482 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1483 target = jfunc->value.member_cst.pfn;
1484 else
1485 return NULL;
1486
1487 return ipa_make_edge_direct_to_target (ie, target);
1488 }
1489
/* Try to find a destination for indirect edge IE that corresponds to a
   virtual call based on a formal parameter which is described by jump
   function JFUNC and, if it can be determined, make it direct and return the
   direct edge.  Otherwise, return NULL.  */
1494
1495 static struct cgraph_edge *
1496 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1497 struct ipa_jump_func *jfunc)
1498 {
1499 tree binfo, type, target;
1500 HOST_WIDE_INT token;
1501
1502 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1503 binfo = jfunc->value.base_binfo;
1504 else if (jfunc->type == IPA_JF_CONST)
1505 {
1506 tree cst = jfunc->value.constant;
1507 if (TREE_CODE (cst) == ADDR_EXPR)
1508 binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (cst, 0),
1509 NULL_TREE);
1510 else
1511 return NULL;
1512 }
1513 else
1514 return NULL;
1515
1516 if (!binfo)
1517 return NULL;
1518
1519 token = ie->indirect_info->otr_token;
1520 type = ie->indirect_info->otr_type;
1521 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1522 if (binfo)
1523 target = gimple_fold_obj_type_ref_known_binfo (token, binfo);
1524 else
1525 return NULL;
1526
1527 if (target)
1528 return ipa_make_edge_direct_to_target (ie, target);
1529 else
1530 return NULL;
1531 }
1532
1533 /* Update the param called notes associated with NODE when CS is being inlined,
1534 assuming NODE is (potentially indirectly) inlined into CS->callee.
1535 Moreover, if the callee is discovered to be constant, create a new cgraph
1536 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1537 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1538
static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      VEC (cgraph_edge_p, heap) **new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;

      /* Fetch the successor first; the current edge may be turned into a
	 direct one below.  */
      next_ie = ie->next_callee;
      if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
	continue;

      /* If we ever use indirect edges for anything other than indirect
	 inlining, we will need to skip those with negative param_indices. */
      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  bitmap_set_bit (iinlining_processed_edges, ie->uid);
	  continue;
	}

      /* Re-map the parameter index through the jump function of the inlined
	 call site.  */
      jfunc = ipa_get_ith_jump_func (top, ici->param_index);
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && jfunc->value.pass_through.operation == NOP_EXPR)
	ici->param_index = jfunc->value.pass_through.formal_id;
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  ici->param_index = jfunc->value.ancestor.formal_id;
	  ici->anc_offset += jfunc->value.ancestor.offset;
	}
      else
	/* Either we can find a destination for this edge now or never. */
	bitmap_set_bit (iinlining_processed_edges, ie->uid);

      if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);

      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      VEC_safe_push (cgraph_edge_p, heap, *new_edges,
			     new_direct_edge);
	      /* Re-fetch TOP in case the underlying edge-args vector was
		 reallocated while the new edge was created.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
    }

  return res;
}
1605
1606 /* Recursively traverse subtree of NODE (including node) made of inlined
1607 cgraph_edges when CS has been inlined and invoke
1608 update_indirect_edges_after_inlining on all nodes and
1609 update_jump_functions_after_inlining on all non-inlined edges that lead out
1610 of this subtree. Newly discovered indirect edges will be added to
1611 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1612 created. */
1613
1614 static bool
1615 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1616 struct cgraph_node *node,
1617 VEC (cgraph_edge_p, heap) **new_edges)
1618 {
1619 struct cgraph_edge *e;
1620 bool res;
1621
1622 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1623
1624 for (e = node->callees; e; e = e->next_callee)
1625 if (!e->inline_failed)
1626 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1627 else
1628 update_jump_functions_after_inlining (cs, e);
1629
1630 return res;
1631 }
1632
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added
   to *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s)
   were created.  */
1638
1639 bool
1640 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1641 VEC (cgraph_edge_p, heap) **new_edges)
1642 {
1643 /* FIXME lto: We do not stream out indirect call information. */
1644 if (flag_wpa)
1645 return false;
1646
1647 /* Do nothing if the preparation phase has not been carried out yet
1648 (i.e. during early inlining). */
1649 if (!ipa_node_params_vector)
1650 return false;
1651 gcc_assert (ipa_edge_args_vector);
1652
1653 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1654 }
1655
1656 /* Frees all dynamically allocated structures that the argument info points
1657 to. */
1658
1659 void
1660 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1661 {
1662 if (args->jump_functions)
1663 ggc_free (args->jump_functions);
1664
1665 memset (args, 0, sizeof (*args));
1666 }
1667
1668 /* Free all ipa_edge structures. */
1669
1670 void
1671 ipa_free_all_edge_args (void)
1672 {
1673 int i;
1674 struct ipa_edge_args *args;
1675
1676 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1677 ipa_free_edge_args_substructures (args);
1678
1679 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1680 ipa_edge_args_vector = NULL;
1681 }
1682
1683 /* Frees all dynamically allocated structures that the param info points
1684 to. */
1685
1686 void
1687 ipa_free_node_params_substructures (struct ipa_node_params *info)
1688 {
1689 if (info->params)
1690 free (info->params);
1691
1692 memset (info, 0, sizeof (*info));
1693 }
1694
1695 /* Free all ipa_node_params structures. */
1696
1697 void
1698 ipa_free_all_node_params (void)
1699 {
1700 int i;
1701 struct ipa_node_params *info;
1702
1703 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1704 ipa_free_node_params_substructures (info);
1705
1706 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1707 ipa_node_params_vector = NULL;
1708 }
1709
1710 /* Hook that is called by cgraph.c when an edge is removed. */
1711
1712 static void
1713 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1714 {
1715 /* During IPA-CP updating we can be called on not-yet analyze clones. */
1716 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1717 <= (unsigned)cs->uid)
1718 return;
1719 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1720 }
1721
1722 /* Hook that is called by cgraph.c when a node is removed. */
1723
1724 static void
1725 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1726 {
1727 /* During IPA-CP updating we can be called on not-yet analyze clones. */
1728 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1729 <= (unsigned)node->uid)
1730 return;
1731 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1732 }
1733
/* Helper function returning a freshly allocated copy of the N bytes at SRC,
   or NULL if SRC is NULL.  */
1736
static void *
duplicate_array (void *src, size_t n)
{
  void *copy;

  if (src == NULL)
    return NULL;

  copy = xmalloc (n);
  memcpy (copy, src, n);
  return copy;
}
1749
1750 static struct ipa_jump_func *
1751 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
1752 {
1753 struct ipa_jump_func *p;
1754
1755 if (!src)
1756 return NULL;
1757
1758 p = ggc_alloc_vec_ipa_jump_func (n);
1759 memcpy (p, src, n * sizeof (struct ipa_jump_func));
1760 return p;
1761 }
1762
/* Hook that is called by cgraph.c when an edge is duplicated.  */
1764
1765 static void
1766 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1767 __attribute__((unused)) void *data)
1768 {
1769 struct ipa_edge_args *old_args, *new_args;
1770 int arg_count;
1771
1772 ipa_check_create_edge_args ();
1773
1774 old_args = IPA_EDGE_REF (src);
1775 new_args = IPA_EDGE_REF (dst);
1776
1777 arg_count = ipa_get_cs_argument_count (old_args);
1778 ipa_set_cs_argument_count (new_args, arg_count);
1779 new_args->jump_functions =
1780 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
1781
1782 if (iinlining_processed_edges
1783 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1784 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1785 }
1786
1787 /* Hook that is called by cgraph.c when a node is duplicated. */
1788
1789 static void
1790 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1791 __attribute__((unused)) void *data)
1792 {
1793 struct ipa_node_params *old_info, *new_info;
1794 int param_count, i;
1795
1796 ipa_check_create_node_params ();
1797 old_info = IPA_NODE_REF (src);
1798 new_info = IPA_NODE_REF (dst);
1799 param_count = ipa_get_param_count (old_info);
1800
1801 ipa_set_param_count (new_info, param_count);
1802 new_info->params = (struct ipa_param_descriptor *)
1803 duplicate_array (old_info->params,
1804 sizeof (struct ipa_param_descriptor) * param_count);
1805 for (i = 0; i < param_count; i++)
1806 new_info->params[i].types = VEC_copy (tree, heap,
1807 old_info->params[i].types);
1808 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
1809 new_info->count_scale = old_info->count_scale;
1810
1811 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
1812 new_info->uses_analysis_done = old_info->uses_analysis_done;
1813 new_info->node_enqueued = old_info->node_enqueued;
1814 }
1815
1816 /* Register our cgraph hooks if they are not already there. */
1817
1818 void
1819 ipa_register_cgraph_hooks (void)
1820 {
1821 if (!edge_removal_hook_holder)
1822 edge_removal_hook_holder =
1823 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1824 if (!node_removal_hook_holder)
1825 node_removal_hook_holder =
1826 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1827 if (!edge_duplication_hook_holder)
1828 edge_duplication_hook_holder =
1829 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
1830 if (!node_duplication_hook_holder)
1831 node_duplication_hook_holder =
1832 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
1833 }
1834
1835 /* Unregister our cgraph hooks if they are not already there. */
1836
static void
ipa_unregister_cgraph_hooks (void)
{
  /* Remove every hook and clear its holder so that
     ipa_register_cgraph_hooks can re-register them later.  */
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
}
1849
/* Allocate all data structures necessary for indirect inlining.  */
1851
void
ipa_create_all_structures_for_iinln (void)
{
  /* Bitmap of call graph edge UIDs already processed by indirect inlining;
     released again in ipa_free_all_structures_after_iinln.  */
  iinlining_processed_edges = BITMAP_ALLOC (NULL);
}
1857
1858 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1859 longer needed after ipa-cp. */
1860
1861 void
1862 ipa_free_all_structures_after_ipa_cp (void)
1863 {
1864 if (!flag_indirect_inlining)
1865 {
1866 ipa_free_all_edge_args ();
1867 ipa_free_all_node_params ();
1868 ipa_unregister_cgraph_hooks ();
1869 }
1870 }
1871
1872 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1873 longer needed after indirect inlining. */
1874
void
ipa_free_all_structures_after_iinln (void)
{
  /* Indirect inlining is over, the processed-edges bitmap is no longer
     needed.  */
  BITMAP_FREE (iinlining_processed_edges);

  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
}
1884
/* Print the parameter descriptors (ipa_tree_map data) of function NODE
   to F.  */
1887
void
ipa_print_node_params (FILE * f, struct cgraph_node *node)
{
  int i, count;
  tree temp;
  struct ipa_node_params *info;

  /* Only analyzed functions have parameter descriptors.  */
  if (!node->analyzed)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s parameter descriptors:\n",
	   cgraph_node_name (node));
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      temp = ipa_get_param (info, i);
      /* NOTE(review): for a descriptor that is not a PARM_DECL the header is
	 omitted but the "used" flag line is still printed -- confirm this is
	 the intended dump format.  */
      if (TREE_CODE (temp) == PARM_DECL)
	fprintf (f, " param %d : %s", i,
		 (DECL_NAME (temp)
		  ? (*lang_hooks.decl_printable_name) (temp, 2)
		  : "(unnamed)"));
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      fprintf (f, "\n");
    }
}
1914
1915 /* Print ipa_tree_map data structures of all functions in the
1916 callgraph to F. */
1917
1918 void
1919 ipa_print_all_params (FILE * f)
1920 {
1921 struct cgraph_node *node;
1922
1923 fprintf (f, "\nFunction parameters:\n");
1924 for (node = cgraph_nodes; node; node = node->next)
1925 ipa_print_node_params (f, node);
1926 }
1927
1928 /* Return a heap allocated vector containing formal parameters of FNDECL. */
1929
1930 VEC(tree, heap) *
1931 ipa_get_vector_of_formal_parms (tree fndecl)
1932 {
1933 VEC(tree, heap) *args;
1934 int count;
1935 tree parm;
1936
1937 count = count_formal_params_1 (fndecl);
1938 args = VEC_alloc (tree, heap, count);
1939 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
1940 VEC_quick_push (tree, args, parm);
1941
1942 return args;
1943 }
1944
1945 /* Return a heap allocated vector containing types of formal parameters of
1946 function type FNTYPE. */
1947
1948 static inline VEC(tree, heap) *
1949 get_vector_of_formal_parm_types (tree fntype)
1950 {
1951 VEC(tree, heap) *types;
1952 int count = 0;
1953 tree t;
1954
1955 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1956 count++;
1957
1958 types = VEC_alloc (tree, heap, count);
1959 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1960 VEC_quick_push (tree, types, TREE_VALUE (t));
1961
1962 return types;
1963 }
1964
1965 /* Modify the function declaration FNDECL and its type according to the plan in
1966 ADJUSTMENTS. It also sets base fields of individual adjustments structures
1967 to reflect the actual parameters being modified which are determined by the
1968 base_index field. */
1969
void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  VEC(tree, heap) *oparms, *otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
      else
	gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
    }
  else
    {
      last_parm_void = false;
      otypes = NULL;
    }

  /* Rebuild the DECL_ARGUMENTS chain and the argument-type list according to
     the adjustments: copied parameters are re-linked as they are, removed
     ones are skipped, and the rest are replaced by synthesized decls.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      parm = VEC_index (tree, oparms, adj->base_index);
      adj->base = parm;

      if (adj->copy_param)
	{
	  /* Keep the original parameter (and its type) unchanged.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
							     adj->base_index),
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  tree new_parm;
	  tree ptype;

	  /* A by-reference replacement passes a pointer to the piece.  */
	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  add_referenced_var (new_parm);
	  mark_sym_for_renaming (new_parm);
	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &DECL_CHAIN (new_parm);
	}
      /* Parameters marked remove_param are simply not re-linked.  */
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      /* The type list was built in reverse; restore the order and re-attach
	 the terminating void entry if the original list had one.  */
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead. */
  if (TREE_CODE (orig_type) != METHOD_TYPE
       || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
	 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info. */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily. */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  if (otypes)
    VEC_free (tree, heap, otypes);
  VEC_free (tree, heap, oparms);
}
2123
2124 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2125 If this is a directly recursive call, CS must be NULL. Otherwise it must
2126 contain the corresponding call graph edge. */
2127
void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  VEC(tree, heap) *vargs;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  tree callee_decl;
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  vargs = VEC_alloc (tree, heap, len);

  gsi = gsi_for_stmt (stmt);
  /* Build the new actual-argument vector; removed parameters contribute
     no entry at all.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->copy_param)
	{
	  /* The original argument is passed through unchanged.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  VEC_quick_push (tree, vargs, arg);
	}
      else if (!adj->remove_param)
	{
	  /* The parameter was replaced by a component at ADJ->offset bits
	     within the original aggregate; build an expression for it.  */
	  tree expr, orig_expr;
	  bool allow_ptr, repl_found;

	  orig_expr = expr = gimple_call_arg (stmt, adj->base_index);
	  if (TREE_CODE (expr) == ADDR_EXPR)
	    {
	      /* Strip the address-of so build_ref_for_offset works on the
		 underlying object directly.  */
	      allow_ptr = false;
	      expr = TREE_OPERAND (expr, 0);
	    }
	  else
	    allow_ptr = true;

	  repl_found = build_ref_for_offset (&expr, TREE_TYPE (expr),
					     adj->offset, adj->type,
					     allow_ptr);
	  if (repl_found)
	    {
	      if (adj->by_ref)
		expr = build_fold_addr_expr (expr);
	    }
	  else
	    {
	      /* No matching component reference could be built; fall back
		 to explicit pointer arithmetic at the byte offset.  */
	      tree ptrtype = build_pointer_type (adj->type);
	      expr = orig_expr;
	      if (!POINTER_TYPE_P (TREE_TYPE (expr)))
		expr = build_fold_addr_expr (expr);
	      if (!useless_type_conversion_p (ptrtype, TREE_TYPE (expr)))
		expr = fold_convert (ptrtype, expr);
	      expr = fold_build2 (POINTER_PLUS_EXPR, ptrtype, expr,
				  build_int_cst (sizetype,
						 adj->offset / BITS_PER_UNIT));
	      if (!adj->by_ref)
		expr = fold_build1 (INDIRECT_REF, adj->type, expr);
	    }
	  /* Gimplify the built expression right before the call.  */
	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  VEC_quick_push (tree, vargs, expr);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  /* For a recursive call (CS == NULL) the callee is the statement's own
     fndecl, otherwise it comes from the call graph edge.  */
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  VEC_free (tree, heap, vargs);
  /* Carry the LHS, block, location, flags and static chain over from the
     original call statement.  */
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  /* Replacing the statement may have invalidated SSA form and the
     dominator tree.  */
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
2228
2229 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2230
2231 static bool
2232 index_in_adjustments_multiple_times_p (int base_index,
2233 ipa_parm_adjustment_vec adjustments)
2234 {
2235 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2236 bool one = false;
2237
2238 for (i = 0; i < len; i++)
2239 {
2240 struct ipa_parm_adjustment *adj;
2241 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2242
2243 if (adj->base_index == base_index)
2244 {
2245 if (one)
2246 return true;
2247 else
2248 one = true;
2249 }
2250 }
2251 return false;
2252 }
2253
2254
2255 /* Return adjustments that should have the same effect on function parameters
2256 and call arguments as if they were first changed according to adjustments in
2257 INNER and then by adjustments in OUTER. */
2258
ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
  int inlen = VEC_length (ipa_parm_adjustment_t, inner);
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP collects the inner adjustments that survive (are not removals);
     the base_index values in OUTER index into this filtered vector.  */
  tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = VEC_index (ipa_parm_adjustment_t, inner, i);

      if (n->remove_param)
	removals++;
      else
	VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
    }

  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment *r;
      struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
						   outer, i);
      struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
						  out->base_index);

      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* Emit only one removal record for an original parameter even if
	     it is referenced by several surviving inner adjustments.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (r, 0, sizeof (*r));
	      r->remove_param = true;
	    }
	  continue;
	}

      /* The combined adjustment keeps the original (innermost) parameter
	 index but takes its replacement type from the outer one.  */
      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
      memset (r, 0, sizeof (*r));
      r->base_index = in->base_index;
      r->type = out->type;

      /* FIXME: Create nonlocal value too. */

      /* Offsets compose additively; a plain copy contributes zero.  */
      if (in->copy_param && out->copy_param)
	r->copy_param = true;
      else if (in->copy_param)
	r->offset = out->offset;
      else if (out->copy_param)
	r->offset = in->offset;
      else
	r->offset = in->offset + out->offset;
    }

  /* Re-append the inner removals, which the outer vector does not know
     about.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
						 inner, i);

      if (n->remove_param)
	VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
    }

  VEC_free (ipa_parm_adjustment_t, heap, tmp);
  return adjustments;
}
2330
2331 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2332 friendly way, assuming they are meant to be applied to FNDECL. */
2333
void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  bool first = true;
  VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      /* Separate consecutive entries; the first one follows the header
	 directly.  */
      if (!first)
	fprintf (file, " ");
      else
	first = false;

      /* Index and the formal parameter it refers to.  */
      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->reduction)
	{
	  fprintf (file, ", reduction: ");
	  print_generic_expr (file, adj->reduction, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      /* The three mutually exclusive adjustment kinds: plain copy,
	 removal, or replacement by a component at a bit offset.  */
      if (adj->copy_param)
	fprintf (file, ", copy_param");
      else if (adj->remove_param)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  VEC_free (tree, heap, parms);
}
2384
2385 /* Stream out jump function JUMP_FUNC to OB. */
2386
static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  /* The type tag goes first so the reader knows which union members
     follow; the per-case field order below must mirror
     ipa_read_jump_function exactly.  */
  lto_output_uleb128_stream (ob->main_stream,
			     jump_func->type);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      lto_output_tree (ob, jump_func->value.base_binfo, true);
      break;
    case IPA_JF_CONST:
      lto_output_tree (ob, jump_func->value.constant, true);
      break;
    case IPA_JF_PASS_THROUGH:
      lto_output_tree (ob, jump_func->value.pass_through.operand, true);
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.pass_through.formal_id);
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.pass_through.operation);
      break;
    case IPA_JF_ANCESTOR:
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.ancestor.offset);
      lto_output_tree (ob, jump_func->value.ancestor.type, true);
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.ancestor.formal_id);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
      lto_output_tree (ob, jump_func->value.member_cst.delta, false);
      break;
    }
}
2424
2425 /* Read in jump function JUMP_FUNC from IB. */
2426
static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct data_in *data_in)
{
  /* The type tag determines which union members follow; the per-case
     field order must mirror ipa_write_jump_function exactly.  */
  jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      jump_func->value.base_binfo = lto_input_tree (ib, data_in);
      break;
    case IPA_JF_CONST:
      jump_func->value.constant = lto_input_tree (ib, data_in);
      break;
    case IPA_JF_PASS_THROUGH:
      jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
      jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
      jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
      break;
    case IPA_JF_ANCESTOR:
      jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
      jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
      jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
      jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
      break;
    }
}
2460
2461 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2462 relevant to indirect inlining to OB. */
2463
static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  lto_output_sleb128_stream (ob->main_stream, ii->param_index);
  lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
  /* Only the polymorphic flag is bit-packed.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  lto_output_bitpack (&bp);

  /* The OTR token and type are streamed only for polymorphic calls;
     ipa_read_indirect_edge_info reads them back under the same
     condition.  */
  if (ii->polymorphic)
    {
      lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
      lto_output_tree (ob, ii->otr_type, true);
    }
}
2483
2484 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2485 relevant to indirect inlining from IB. */
2486
2487 static void
2488 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2489 struct data_in *data_in ATTRIBUTE_UNUSED,
2490 struct cgraph_edge *cs)
2491 {
2492 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2493 struct bitpack_d bp;
2494
2495 ii->param_index = (int) lto_input_sleb128 (ib);
2496 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2497 bp = lto_input_bitpack (ib);
2498 ii->polymorphic = bp_unpack_value (&bp, 1);
2499 if (ii->polymorphic)
2500 {
2501 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2502 ii->otr_type = lto_input_tree (ib, data_in);
2503 }
2504 }
2505
2506 /* Stream out NODE info to OB. */
2507
static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_cgraph_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  /* Identify the node by its index in the cgraph encoder so the reader
     can find it again.  */
  encoder = ob->decl_state->cgraph_node_encoder;
  node_ref = lto_cgraph_encoder_encode (encoder, node);
  lto_output_uleb128_stream (ob->main_stream, node_ref);

  /* Pack the variadic-call flag and the per-parameter "used" bits into
     a single bitpack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, info->called_with_var_arguments, 1);
  /* The "used" bits are only meaningful once uses analysis has run, and
     transient IPA-CP state must not be streamed.  */
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, info->params[j].used, 1);
  lto_output_bitpack (&bp);
  /* For each direct callee, stream its argument count followed by one
     jump function per argument.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      lto_output_uleb128_stream (ob->main_stream,
				 ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    ipa_write_indirect_edge_info (ob, e);
}
2543
/* Stream in NODE info from IB.  */
2545
static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  /* Make sure the parameter descriptors exist before filling them in.  */
  ipa_initialize_node_params (node);

  /* Unpack the bits in the same order ipa_write_node_info packed them.  */
  bp = lto_input_bitpack (ib);
  info->called_with_var_arguments = bp_unpack_value (&bp, 1);
  /* The writer asserted uses_analysis_done whenever there were
     parameters, so it can be reconstructed from the count alone.  */
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    info->params[k].used = bp_unpack_value (&bp, 1);
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = lto_input_uleb128 (ib);

      ipa_set_cs_argument_count (args, count);
      if (!count)
	continue;

      /* Allocate and read one jump function per call argument.  */
      args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
	(ipa_get_cs_argument_count (args));
      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    ipa_read_indirect_edge_info (ib, data_in, e);
}
2581
2582 /* Write jump functions for nodes in SET. */
2583
2584 void
2585 ipa_prop_write_jump_functions (cgraph_node_set set)
2586 {
2587 struct cgraph_node *node;
2588 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2589 unsigned int count = 0;
2590 cgraph_node_set_iterator csi;
2591
2592 ob->cgraph_node = NULL;
2593
2594 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2595 {
2596 node = csi_node (csi);
2597 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2598 count++;
2599 }
2600
2601 lto_output_uleb128_stream (ob->main_stream, count);
2602
2603 /* Process all of the functions. */
2604 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2605 {
2606 node = csi_node (csi);
2607 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2608 ipa_write_node_info (ob, node);
2609 }
2610 lto_output_1_stream (ob->main_stream, 0);
2611 produce_asm (ob, NULL);
2612 destroy_output_block (ob);
2613 }
2614
2615 /* Read section in file FILE_DATA of length LEN with data DATA. */
2616
static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* Section layout: header, cfg, main stream, string table.  */
  const int32_t cfg_offset = sizeof (struct lto_function_header);
  const int32_t main_offset = cfg_offset + header->cfg_size;
  const int32_t string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  /* Number of node records, as written by ipa_prop_write_jump_functions.  */
  count = lto_input_uleb128 (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_cgraph_encoder_t encoder;

      /* Each record starts with the node's encoder index.  */
      index = lto_input_uleb128 (&ib_main);
      encoder = file_data->cgraph_node_encoder;
      node = lto_cgraph_encoder_deref (encoder, index);
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
2655
2656 /* Read ipcp jump functions. */
2657
2658 void
2659 ipa_prop_read_jump_functions (void)
2660 {
2661 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2662 struct lto_file_decl_data *file_data;
2663 unsigned int j = 0;
2664
2665 ipa_check_create_node_params ();
2666 ipa_check_create_edge_args ();
2667 ipa_register_cgraph_hooks ();
2668
2669 while ((file_data = file_data_vec[j++]))
2670 {
2671 size_t len;
2672 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2673
2674 if (data)
2675 ipa_prop_read_section (file_data, data, len);
2676 }
2677 }
2678
/* After merging units, we can get a mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */
2682
2683 void
2684 ipa_update_after_lto_read (void)
2685 {
2686 struct cgraph_node *node;
2687 struct cgraph_edge *cs;
2688
2689 ipa_check_create_node_params ();
2690 ipa_check_create_edge_args ();
2691
2692 for (node = cgraph_nodes; node; node = node->next)
2693 if (node->analyzed)
2694 ipa_initialize_node_params (node);
2695
2696 for (node = cgraph_nodes; node; node = node->next)
2697 if (node->analyzed)
2698 for (cs = node->callees; cs; cs = cs->next_callee)
2699 {
2700 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2701 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2702 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
2703 }
2704 }