[gcc.git] / gcc / ipa-prop.c  (source snapshot, 2010-12-19)
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41
42
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool modified;               /* Set once the parameter is known to be
				  (possibly) modified before the analyzed
				  call; acts as a cache for
				  is_parm_modified_before_call.  */
  bitmap visited_statements;   /* Statements already visited by
				  walk_aliased_vdefs, so repeated queries do
				  not re-scan them.  */
};

/* Vector where the parameter infos are actually stored.  */
VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the edge argument infos are actually stored, indexed by call
   graph edge UID (see ipa_count_arguments).  */
VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;

/* Bitmap with all UIDs of call graph edges that have been already processed
   by indirect inlining.  */
static bitmap iinlining_processed_edges;

/* Holders of ipa cgraph hooks — presumably kept so the hooks can be
   unregistered later; the registration/unregistration code is outside this
   chunk.  */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
66
67 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
68 it is in one or not. It should almost never be used directly, as opposed to
69 ipa_push_func_to_list. */
70
71 void
72 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
73 struct cgraph_node *node,
74 struct ipa_node_params *info)
75 {
76 struct ipa_func_list *temp;
77
78 info->node_enqueued = 1;
79 temp = XCNEW (struct ipa_func_list);
80 temp->node = node;
81 temp->next = *wl;
82 *wl = temp;
83 }
84
85 /* Initialize worklist to contain all functions. */
86
87 struct ipa_func_list *
88 ipa_init_func_list (void)
89 {
90 struct cgraph_node *node;
91 struct ipa_func_list * wl;
92
93 wl = NULL;
94 for (node = cgraph_nodes; node; node = node->next)
95 if (node->analyzed)
96 {
97 struct ipa_node_params *info = IPA_NODE_REF (node);
98 /* Unreachable nodes should have been eliminated before ipcp and
99 inlining. */
100 gcc_assert (node->needed || node->reachable);
101 ipa_push_func_to_list_1 (&wl, node, info);
102 }
103
104 return wl;
105 }
106
107 /* Remove a function from the worklist WL and return it. */
108
109 struct cgraph_node *
110 ipa_pop_func_from_list (struct ipa_func_list **wl)
111 {
112 struct ipa_node_params *info;
113 struct ipa_func_list *first;
114 struct cgraph_node *node;
115
116 first = *wl;
117 *wl = (*wl)->next;
118 node = first->node;
119 free (first);
120
121 info = IPA_NODE_REF (node);
122 info->node_enqueued = 0;
123 return node;
124 }
125
126 /* Return index of the formal whose tree is PTREE in function which corresponds
127 to INFO. */
128
129 static int
130 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
131 {
132 int i, count;
133
134 count = ipa_get_param_count (info);
135 for (i = 0; i < count; i++)
136 if (ipa_get_param(info, i) == ptree)
137 return i;
138
139 return -1;
140 }
141
142 /* Populate the param_decl field in parameter descriptors of INFO that
143 corresponds to NODE. */
144
145 static void
146 ipa_populate_param_decls (struct cgraph_node *node,
147 struct ipa_node_params *info)
148 {
149 tree fndecl;
150 tree fnargs;
151 tree parm;
152 int param_num;
153
154 fndecl = node->decl;
155 fnargs = DECL_ARGUMENTS (fndecl);
156 param_num = 0;
157 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
158 {
159 info->params[param_num].decl = parm;
160 param_num++;
161 }
162 }
163
164 /* Return how many formal parameters FNDECL has. */
165
166 static inline int
167 count_formal_params_1 (tree fndecl)
168 {
169 tree parm;
170 int count = 0;
171
172 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
173 count++;
174
175 return count;
176 }
177
178 /* Count number of formal parameters in NOTE. Store the result to the
179 appropriate field of INFO. */
180
181 static void
182 ipa_count_formal_params (struct cgraph_node *node,
183 struct ipa_node_params *info)
184 {
185 int param_num;
186
187 param_num = count_formal_params_1 (node->decl);
188 ipa_set_param_count (info, param_num);
189 }
190
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
193 param_decls. */
194
195 void
196 ipa_initialize_node_params (struct cgraph_node *node)
197 {
198 struct ipa_node_params *info = IPA_NODE_REF (node);
199
200 if (!info->params)
201 {
202 ipa_count_formal_params (node, info);
203 info->params = XCNEWVEC (struct ipa_param_descriptor,
204 ipa_get_param_count (info));
205 ipa_populate_param_decls (node, info);
206 }
207 }
208
/* Count number of arguments callsite CS has and store it in
   ipa_edge_args structure corresponding to this callsite.  */

static void
ipa_count_arguments (struct cgraph_edge *cs)
{
  gimple stmt;
  int arg_num;

  stmt = cs->call_stmt;
  gcc_assert (is_gimple_call (stmt));
  arg_num = gimple_call_num_args (stmt);
  /* The edge-args vector is indexed by edge UID; grow it (zero-filled) so it
     can hold an entry for every existing edge before writing through
     IPA_EDGE_REF.  */
  if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
      <= (unsigned) cgraph_edge_max_uid)
    VEC_safe_grow_cleared (ipa_edge_args_t, gc,
			   ipa_edge_args_vector, cgraph_edge_max_uid + 1);
  ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
}
227
228 /* Print the jump functions associated with call graph edge CS to file F. */
229
230 static void
231 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
232 {
233 int i, count;
234
235 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
236 for (i = 0; i < count; i++)
237 {
238 struct ipa_jump_func *jump_func;
239 enum jump_func_type type;
240
241 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
242 type = jump_func->type;
243
244 fprintf (f, " param %d: ", i);
245 if (type == IPA_JF_UNKNOWN)
246 fprintf (f, "UNKNOWN\n");
247 else if (type == IPA_JF_KNOWN_TYPE)
248 {
249 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
250 fprintf (f, "KNOWN TYPE, type in binfo is: ");
251 print_generic_expr (f, binfo_type, 0);
252 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
253 }
254 else if (type == IPA_JF_CONST)
255 {
256 tree val = jump_func->value.constant;
257 fprintf (f, "CONST: ");
258 print_generic_expr (f, val, 0);
259 if (TREE_CODE (val) == ADDR_EXPR
260 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
261 {
262 fprintf (f, " -> ");
263 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
264 0);
265 }
266 fprintf (f, "\n");
267 }
268 else if (type == IPA_JF_CONST_MEMBER_PTR)
269 {
270 fprintf (f, "CONST MEMBER PTR: ");
271 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
272 fprintf (f, ", ");
273 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
274 fprintf (f, "\n");
275 }
276 else if (type == IPA_JF_PASS_THROUGH)
277 {
278 fprintf (f, "PASS THROUGH: ");
279 fprintf (f, "%d, op %s ",
280 jump_func->value.pass_through.formal_id,
281 tree_code_name[(int)
282 jump_func->value.pass_through.operation]);
283 if (jump_func->value.pass_through.operation != NOP_EXPR)
284 print_generic_expr (dump_file,
285 jump_func->value.pass_through.operand, 0);
286 fprintf (dump_file, "\n");
287 }
288 else if (type == IPA_JF_ANCESTOR)
289 {
290 fprintf (f, "ANCESTOR: ");
291 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
292 jump_func->value.ancestor.formal_id,
293 jump_func->value.ancestor.offset);
294 print_generic_expr (f, jump_func->value.ancestor.type, 0);
295 fprintf (dump_file, "\n");
296 }
297 }
298 }
299
300
301 /* Print the jump functions of all arguments on all call graph edges going from
302 NODE to file F. */
303
304 void
305 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
306 {
307 struct cgraph_edge *cs;
308 int i;
309
310 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
311 for (cs = node->callees; cs; cs = cs->next_callee)
312 {
313 if (!ipa_edge_args_info_available_for_edge_p (cs))
314 continue;
315
316 fprintf (f, " callsite %s/%i -> %s/%i : \n",
317 cgraph_node_name (node), node->uid,
318 cgraph_node_name (cs->callee), cs->callee->uid);
319 ipa_print_node_jump_functions_for_edge (f, cs);
320 }
321
322 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
323 {
324 if (!ipa_edge_args_info_available_for_edge_p (cs))
325 continue;
326
327 if (cs->call_stmt)
328 {
329 fprintf (f, " indirect callsite %d for stmt ", i);
330 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
331 }
332 else
333 fprintf (f, " indirect callsite %d :\n", i);
334 ipa_print_node_jump_functions_for_edge (f, cs);
335
336 }
337 }
338
339 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
340
341 void
342 ipa_print_all_jump_functions (FILE *f)
343 {
344 struct cgraph_node *node;
345
346 fprintf (f, "\nJump functions:\n");
347 for (node = cgraph_nodes; node; node = node->next)
348 {
349 ipa_print_node_jump_functions (f, node);
350 }
351 }
352
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to find out whether NAME can be
   described by a (possibly polynomial) pass-through jump-function or an
   ancestor jump function and if so, write the appropriate function into
   JFUNC.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, op2, type;
  int index;

  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  /* Case 1: the first operand is directly an unmodified (default-definition)
     formal parameter — a (possibly arithmetic) pass-through.  */
  if (TREE_CODE (op1) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (op1))
    {
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      if (index < 0)
	return;

      if (op2)
	{
	  /* Binary operation: the second operand must be an interprocedural
	     invariant and, unless the statement is a comparison, the result
	     type must be compatible with the parameter's type.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
	  jfunc->value.pass_through.operand = op2;
	}
      else if (gimple_assign_unary_nop_p (stmt))
	{
	  /* A value-preserving unary copy is a plain pass-through.  */
	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = NOP_EXPR;
	}
      return;
    }

  /* Case 2: taking the address of a component inside a record pointed to by
     an unmodified parameter — an ancestor jump function.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;

  op1 = TREE_OPERAND (op1, 0);
  type = TREE_TYPE (op1);
  if (TREE_CODE (type) != RECORD_TYPE)
    return;
  op1 = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (op1) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  /* Fold the MEM_REF's own byte offset into the accumulated bit offset.  */
  offset += mem_ref_offset (op1).low * BITS_PER_UNIT;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (op1) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (op1)
      || offset < 0)
    return;

  /* The base pointer must itself be a formal parameter.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
  if (index >= 0)
    {
      jfunc->type = IPA_JF_ANCESTOR;
      jfunc->value.ancestor.formal_id = index;
      jfunc->value.ancestor.offset = offset;
      jfunc->value.ancestor.type = type;
    }
}
429
430
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple phi)
{
  HOST_WIDE_INT offset, size, max_size;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr;
  int index, i;

  /* The PHI must merge exactly two values: a zero constant and one SSA
     value (TMP below).  */
  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  /* TMP must be a statement-defined SSA name of pointer-to-record type.  */
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* TMP must be produced by a single-rhs assignment in a block with exactly
     one predecessor (the guard block matched below).  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb)
      || !gimple_assign_single_p (assign))
    return;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return;
  expr = TREE_OPERAND (expr, 0);
  expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  /* Fold the MEM_REF's own byte offset into the accumulated bit offset.  */
  offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || offset < 0)
    return;

  /* The address base must be an unmodified formal parameter.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The assignment block must be guarded by a PARM != 0 condition.  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;


  /* Both PHI predecessors must come from the guard block or the guarded
     assignment block — no other paths may reach the merge.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = index;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = TREE_TYPE (TREE_TYPE (tmp));
}
529
530 /* Given OP whch is passed as an actual argument to a called function,
531 determine if it is possible to construct a KNOWN_TYPE jump function for it
532 and if so, create one and store it to JFUNC. */
533
534 static void
535 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc)
536 {
537 tree binfo;
538
539 if (TREE_CODE (op) != ADDR_EXPR)
540 return;
541
542 op = TREE_OPERAND (op, 0);
543 binfo = gimple_get_relevant_ref_binfo (op, NULL_TREE);
544 if (binfo)
545 {
546 jfunc->type = IPA_JF_KNOWN_TYPE;
547 jfunc->value.base_binfo = binfo;
548 }
549 }
550
551
552 /* Determine the jump functions of scalar arguments. Scalar means SSA names
553 and constants of a number of selected types. INFO is the ipa_node_params
554 structure associated with the caller, FUNCTIONS is a pointer to an array of
555 jump function structures associated with CALL which is the call statement
556 being examined.*/
557
558 static void
559 compute_scalar_jump_functions (struct ipa_node_params *info,
560 struct ipa_jump_func *functions,
561 gimple call)
562 {
563 tree arg;
564 unsigned num = 0;
565
566 for (num = 0; num < gimple_call_num_args (call); num++)
567 {
568 arg = gimple_call_arg (call, num);
569
570 if (is_gimple_ip_invariant (arg))
571 {
572 functions[num].type = IPA_JF_CONST;
573 functions[num].value.constant = arg;
574 }
575 else if (TREE_CODE (arg) == SSA_NAME)
576 {
577 if (SSA_NAME_IS_DEFAULT_DEF (arg))
578 {
579 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
580
581 if (index >= 0)
582 {
583 functions[num].type = IPA_JF_PASS_THROUGH;
584 functions[num].value.pass_through.formal_id = index;
585 functions[num].value.pass_through.operation = NOP_EXPR;
586 }
587 }
588 else
589 {
590 gimple stmt = SSA_NAME_DEF_STMT (arg);
591 if (is_gimple_assign (stmt))
592 compute_complex_assign_jump_func (info, &functions[num],
593 stmt, arg);
594 else if (gimple_code (stmt) == GIMPLE_PHI)
595 compute_complex_ancestor_jump_func (info, &functions[num],
596 stmt);
597 }
598 }
599 else
600 compute_known_type_jump_func (arg, &functions[num]);
601 }
602 }
603
604 /* Inspect the given TYPE and return true iff it has the same structure (the
605 same number of fields of the same types) as a C++ member pointer. If
606 METHOD_PTR and DELTA are non-NULL, store the trees representing the
607 corresponding fields there. */
608
609 static bool
610 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
611 {
612 tree fld;
613
614 if (TREE_CODE (type) != RECORD_TYPE)
615 return false;
616
617 fld = TYPE_FIELDS (type);
618 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
619 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
620 return false;
621
622 if (method_ptr)
623 *method_ptr = fld;
624
625 fld = DECL_CHAIN (fld);
626 if (!fld || INTEGRAL_TYPE_P (fld))
627 return false;
628 if (delta)
629 *delta = fld;
630
631 if (DECL_CHAIN (fld))
632 return false;
633
634 return true;
635 }
636
637 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
638 boolean variable pointed to by DATA. */
639
640 static bool
641 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
642 void *data)
643 {
644 bool *b = (bool *) data;
645 *b = true;
646 return true;
647 }
648
/* Return true if the formal parameter PARM might have been modified in this
   function before reaching the statement CALL.  PARM_INFO is a pointer to a
   structure containing intermediate information about PARM.  */

static bool
is_parm_modified_before_call (struct param_analysis_info *parm_info,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* A previous query already determined modification; reuse the cached
     answer.  */
  if (parm_info->modified)
    return true;

  /* Walk the virtual definitions reaching CALL; mark_modified sets MODIFIED
     as soon as one of them may clobber PARM.  The visited bitmap lives in
     PARM_INFO so later queries do not re-scan the same statements.  */
  ao_ref_init (&refd, parm);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
		      &modified, &parm_info->visited_statements);
  if (modified)
    {
      /* Cache the positive result for subsequent queries.  */
      parm_info->modified = true;
      return true;
    }
  return false;
}
673
674 /* Go through arguments of the CALL and for every one that looks like a member
675 pointer, check whether it can be safely declared pass-through and if so,
676 mark that to the corresponding item of jump FUNCTIONS. Return true iff
677 there are non-pass-through member pointers within the arguments. INFO
678 describes formal parameters of the caller. PARMS_INFO is a pointer to a
679 vector containing intermediate information about each formal parameter. */
680
681 static bool
682 compute_pass_through_member_ptrs (struct ipa_node_params *info,
683 struct param_analysis_info *parms_info,
684 struct ipa_jump_func *functions,
685 gimple call)
686 {
687 bool undecided_members = false;
688 unsigned num;
689 tree arg;
690
691 for (num = 0; num < gimple_call_num_args (call); num++)
692 {
693 arg = gimple_call_arg (call, num);
694
695 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
696 {
697 if (TREE_CODE (arg) == PARM_DECL)
698 {
699 int index = ipa_get_param_decl_index (info, arg);
700
701 gcc_assert (index >=0);
702 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
703 {
704 functions[num].type = IPA_JF_PASS_THROUGH;
705 functions[num].value.pass_through.formal_id = index;
706 functions[num].value.pass_through.operation = NOP_EXPR;
707 }
708 else
709 undecided_members = true;
710 }
711 else
712 undecided_members = true;
713 }
714 }
715
716 return undecided_members;
717 }
718
719 /* Simple function filling in a member pointer constant jump function (with PFN
720 and DELTA as the constant value) into JFUNC. */
721
722 static void
723 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
724 tree pfn, tree delta)
725 {
726 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
727 jfunc->value.member_cst.pfn = pfn;
728 jfunc->value.member_cst.delta = delta;
729 }
730
731 /* If RHS is an SSA_NAMe and it is defined by a simple copy assign statement,
732 return the rhs of its defining statement. */
733
734 static inline tree
735 get_ssa_def_if_simple_copy (tree rhs)
736 {
737 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
738 {
739 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
740
741 if (gimple_assign_single_p (def_stmt))
742 rhs = gimple_assign_rhs1 (def_stmt);
743 else
744 break;
745 }
746 return rhs;
747 }
748
749 /* Traverse statements from CALL backwards, scanning whether the argument ARG
750 which is a member pointer is filled in with constant values. If it is, fill
751 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
752 fields of the record type of the member pointer. To give an example, we
753 look for a pattern looking like the following:
754
755 D.2515.__pfn ={v} printStuff;
756 D.2515.__delta ={v} 0;
757 i_1 = doprinting (D.2515); */
758
759 static void
760 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
761 tree delta_field, struct ipa_jump_func *jfunc)
762 {
763 gimple_stmt_iterator gsi;
764 tree method = NULL_TREE;
765 tree delta = NULL_TREE;
766
767 gsi = gsi_for_stmt (call);
768
769 gsi_prev (&gsi);
770 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
771 {
772 gimple stmt = gsi_stmt (gsi);
773 tree lhs, rhs, fld;
774
775 if (!stmt_may_clobber_ref_p (stmt, arg))
776 continue;
777 if (!gimple_assign_single_p (stmt))
778 return;
779
780 lhs = gimple_assign_lhs (stmt);
781 rhs = gimple_assign_rhs1 (stmt);
782
783 if (TREE_CODE (lhs) != COMPONENT_REF
784 || TREE_OPERAND (lhs, 0) != arg)
785 return;
786
787 fld = TREE_OPERAND (lhs, 1);
788 if (!method && fld == method_field)
789 {
790 rhs = get_ssa_def_if_simple_copy (rhs);
791 if (TREE_CODE (rhs) == ADDR_EXPR
792 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
793 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
794 {
795 method = TREE_OPERAND (rhs, 0);
796 if (delta)
797 {
798 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
799 return;
800 }
801 }
802 else
803 return;
804 }
805
806 if (!delta && fld == delta_field)
807 {
808 rhs = get_ssa_def_if_simple_copy (rhs);
809 if (TREE_CODE (rhs) == INTEGER_CST)
810 {
811 delta = rhs;
812 if (method)
813 {
814 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
815 return;
816 }
817 }
818 else
819 return;
820 }
821 }
822
823 return;
824 }
825
826 /* Go through the arguments of the CALL and for every member pointer within
827 tries determine whether it is a constant. If it is, create a corresponding
828 constant jump function in FUNCTIONS which is an array of jump functions
829 associated with the call. */
830
831 static void
832 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
833 gimple call)
834 {
835 unsigned num;
836 tree arg, method_field, delta_field;
837
838 for (num = 0; num < gimple_call_num_args (call); num++)
839 {
840 arg = gimple_call_arg (call, num);
841
842 if (functions[num].type == IPA_JF_UNKNOWN
843 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
844 &delta_field))
845 determine_cst_member_ptr (call, arg, method_field, delta_field,
846 &functions[num]);
847 }
848 }
849
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
  gimple call;

  /* Nothing to do for argument-less calls or if the jump functions were
     already computed for this edge.  */
  if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
    return;
  /* GC-allocated, zero-initialized, so untouched entries read as
     IPA_JF_UNKNOWN.  */
  arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
    (ipa_get_cs_argument_count (arguments));

  call = cs->call_stmt;
  gcc_assert (is_gimple_call (call));

  /* We will deal with constants and SSA scalars first:  */
  compute_scalar_jump_functions (info, arguments->jump_functions, call);

  /* Let's check whether there are any potential member pointers and if so,
     whether we can determine their functions as pass_through.  Returns
     false when no member pointer remained undecided.  */
  if (!compute_pass_through_member_ptrs (info, parms_info,
					 arguments->jump_functions, call))
    return;

  /* Finally, let's check whether we actually pass a new constant member
     pointer here...  */
  compute_cst_member_ptr_arguments (arguments->jump_functions, call);
}
883
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
			    struct param_analysis_info *parms_info)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      /* We do not need to bother analyzing calls to unknown
	 functions unless they may become known during lto/whopr.  */
      if (!cs->callee->analyzed && !flag_lto)
	continue;
      ipa_count_arguments (cs);
      /* If the descriptor of the callee is not initialized yet, we have to do
	 it now.  */
      if (cs->callee->analyzed)
	ipa_initialize_node_params (cs->callee);
      /* A mismatch between actual-argument and formal-parameter counts means
	 the callee is called with a variable number of arguments; record
	 that in the callee's descriptor.  */
      if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
	  != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
	ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
      ipa_compute_jump_functions_for_edge (parms_info, cs);
    }

  /* Indirect edges have no callee descriptor to check; only count the
     arguments and compute the jump functions.  */
  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_count_arguments (cs);
      ipa_compute_jump_functions_for_edge (parms_info, cs);
    }
}
916
/* If RHS looks like a rhs of a statement loading pfn from a member
   pointer formal parameter, return the parameter, otherwise return
   NULL.  If USE_DELTA, then we look for a use of the delta field
   rather than the pfn.  */

static tree
ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
{
  tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;

  /* The load may appear either as a COMPONENT_REF on top of a MEM_REF of
     the parameter, or as a bare MEM_REF at a constant offset from its
     address.  Peel off a COMPONENT_REF first, remembering the field.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type is structured like a C++
     member pointer.  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;

  ref_offset = TREE_OPERAND (rhs, 1);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the
	 referenced field must be the one requested.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;

      if (use_delta)
	fld = delta_field;
      else
	fld = ptr_field;

      return ref_field == fld ? rec : NULL_TREE;
    }

  /* Bare MEM_REF form: compare the constant offset against the byte
     position of the requested field.  */
  if (use_delta)
    fld_offset = byte_position (delta_field);
  else
    fld_offset = byte_position (ptr_field);

  return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
}
966
967 /* If STMT looks like a statement loading a value from a member pointer formal
968 parameter, this function returns that parameter. */
969
970 static tree
971 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
972 {
973 tree rhs;
974
975 if (!gimple_assign_single_p (stmt))
976 return NULL_TREE;
977
978 rhs = gimple_assign_rhs1 (stmt);
979 return ipa_get_member_ptr_load_param (rhs, use_delta);
980 }
981
982 /* Returns true iff T is an SSA_NAME defined by a statement. */
983
984 static bool
985 ipa_is_ssa_with_stmt_def (tree t)
986 {
987 if (TREE_CODE (t) == SSA_NAME
988 && !SSA_NAME_IS_DEFAULT_DEF (t))
989 return true;
990 else
991 return false;
992 }
993
/* Find the indirect call graph edge corresponding to STMT and add to it all
   information necessary to describe a call to a parameter number PARAM_INDEX.
   NODE is the caller.  POLYMORPHIC should be set to true iff the call is a
   virtual one.  */

static void
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
		     bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->anc_offset = 0;
  cs->indirect_info->polymorphic = polymorphic;
  if (polymorphic)
    {
      /* For virtual calls also record the OBJ_TYPE_REF token and the static
	 type of the object the call is performed on.  */
      tree otr = gimple_call_fn (stmt);
      tree type, token = OBJ_TYPE_REF_TOKEN (otr);
      cs->indirect_info->otr_token = tree_low_cst (token, 1);
      type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
      cs->indirect_info->otr_type = type;
    }
}
1018
1019 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1020 (described by INFO). PARMS_INFO is a pointer to a vector containing
1021 intermediate information about each formal parameter. Currently it checks
1022 whether the call calls a pointer that is a formal parameter and if so, the
1023 parameter is marked with the called flag and an indirect call graph edge
1024 describing the call is created. This is very simple for ordinary pointers
1025 represented in SSA but not-so-nice when it comes to member pointers. The
1026 ugly part of this function does nothing more than trying to match the
1027 pattern of such a call. An example of such a pattern is the gimple dump
1028 below, the call is on the last line:
1029
1030 <bb 2>:
1031 f$__delta_5 = f.__delta;
1032 f$__pfn_24 = f.__pfn;
1033
1034 or
1035 <bb 2>:
1036 f$__delta_5 = MEM[(struct *)&f];
1037 f$__pfn_24 = MEM[(struct *)&f + 4B];
1038
1039 and a few lines below:
1040
1041 <bb 5>
1042 D.2496_3 = (int) f$__pfn_24;
1043 D.2497_4 = D.2496_3 & 1;
1044 if (D.2497_4 != 0)
1045 goto <bb 3>;
1046 else
1047 goto <bb 4>;
1048
1049 <bb 6>:
1050 D.2500_7 = (unsigned int) f$__delta_5;
1051 D.2501_8 = &S + D.2500_7;
1052 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1053 D.2503_10 = *D.2502_9;
1054 D.2504_12 = f$__pfn_24 + -1;
1055 D.2505_13 = (unsigned int) D.2504_12;
1056 D.2506_14 = D.2503_10 + D.2505_13;
1057 D.2507_15 = *D.2506_14;
1058 iftmp.11_16 = (String:: *) D.2507_15;
1059
1060 <bb 7>:
1061 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1062 D.2500_19 = (unsigned int) f$__delta_5;
1063 D.2508_20 = &S + D.2500_19;
1064 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1065
1066 Such patterns are results of simple calls to a member pointer:
1067
1068 int doprinting (int (MyString::* f)(int) const)
1069 {
1070 MyString S ("somestring");
1071
1072 return (S.*f)(4);
1073 }
1074 */
1075
static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_info,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;

  /* The simple case: TARGET is a default-definition SSA name, i.e. an
     unmodified pointer parameter.  Note the call and we are done.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call, false);
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer (see the example in the comment above this function).  */

  if (!POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* The actual callee must be selected by a two-argument PHI merging the
     virtual and the non-virtual branch of the member pointer call.  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_code (def) != GIMPLE_PHI)
    return;

  if (gimple_phi_num_args (def) != 2)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
    {
      /* Exactly one PHI argument may be a direct pfn load from a
	 parameter; if both are, the pattern does not match.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern: BB -> VIRT_BB -> JOIN, with VIRT_BB
     having no other predecessors or successors.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if (gimple_cond_code (branch) != NE_EXPR
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition operand must come from a (value & 1) computation.  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion preceding the AND.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* On targets that keep the virtual bit in the delta field, the tested
     value is a load of the delta rather than of the pfn.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta));

  /* Both loads must read the same member-pointer parameter.  */
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  /* Only note the call if the parameter is provably unmodified before the
     call statement; otherwise the loaded callee could be stale.  */
  if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
						   call, rec))
    ipa_note_param_call (node, index, call, false);

  return;
}
1197
1198 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1199 object referenced in the expression is a formal parameter of the caller
1200 (described by INFO), create a call note for the statement. */
1201
1202 static void
1203 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1204 struct ipa_node_params *info, gimple call,
1205 tree target)
1206 {
1207 tree obj = OBJ_TYPE_REF_OBJECT (target);
1208 tree var;
1209 int index;
1210
1211 if (TREE_CODE (obj) == ADDR_EXPR)
1212 {
1213 do
1214 {
1215 obj = TREE_OPERAND (obj, 0);
1216 }
1217 while (TREE_CODE (obj) == COMPONENT_REF);
1218 if (TREE_CODE (obj) != MEM_REF)
1219 return;
1220 obj = TREE_OPERAND (obj, 0);
1221 }
1222
1223 if (TREE_CODE (obj) != SSA_NAME
1224 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1225 return;
1226
1227 var = SSA_NAME_VAR (obj);
1228 index = ipa_get_param_decl_index (info, var);
1229
1230 if (index >= 0)
1231 ipa_note_param_call (node, index, call, true);
1232 }
1233
1234 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1235 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1236 containing intermediate information about each formal parameter. */
1237
1238 static void
1239 ipa_analyze_call_uses (struct cgraph_node *node,
1240 struct ipa_node_params *info,
1241 struct param_analysis_info *parms_info, gimple call)
1242 {
1243 tree target = gimple_call_fn (call);
1244
1245 if (TREE_CODE (target) == SSA_NAME)
1246 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1247 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1248 ipa_analyze_virtual_call_uses (node, info, call, target);
1249 }
1250
1251
1252 /* Analyze the call statement STMT with respect to formal parameters (described
1253 in INFO) of caller given by NODE. Currently it only checks whether formal
1254 parameters are called. PARMS_INFO is a pointer to a vector containing
1255 intermediate information about each formal parameter. */
1256
1257 static void
1258 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1259 struct param_analysis_info *parms_info, gimple stmt)
1260 {
1261 if (is_gimple_call (stmt))
1262 ipa_analyze_call_uses (node, info, parms_info, stmt);
1263 }
1264
1265 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1266 If OP is a parameter declaration, mark it as used in the info structure
1267 passed in DATA. */
1268
1269 static bool
1270 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1271 tree op, void *data)
1272 {
1273 struct ipa_node_params *info = (struct ipa_node_params *) data;
1274
1275 op = get_base_address (op);
1276 if (op
1277 && TREE_CODE (op) == PARM_DECL)
1278 {
1279 int index = ipa_get_param_decl_index (info, op);
1280 gcc_assert (index >= 0);
1281 info->params[index].used = true;
1282 }
1283
1284 return false;
1285 }
1286
1287 /* Scan the function body of NODE and inspect the uses of formal parameters.
1288 Store the findings in various structures of the associated ipa_node_params
1289 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1290 vector containing intermediate information about each formal parameter. */
1291
static void
ipa_analyze_params_uses (struct cgraph_node *node,
			 struct param_analysis_info *parms_info)
{
  tree decl = node->decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  /* Nothing to do for parameterless functions or when the analysis has
     already been performed for this node.  */
  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm)
	  && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
	info->params[i].used = true;
    }

  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* Debug statements must not influence the used flags.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  ipa_analyze_stmt_uses (node, info, parms_info, stmt);
	  /* Any load, store or taken address of a PARM_DECL marks the
	     corresponding parameter as used.  */
	  walk_stmt_load_store_addr_ops (stmt, info,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis);
	}
      /* PHI nodes can also reference parameters.  */
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis);
    }

  info->uses_analysis_done = 1;
}
1341
/* Initialize the array describing properties of formal parameters of NODE,
   analyze their uses and compute jump functions associated with actual
   arguments of calls from within NODE.  */
1345
1346 void
1347 ipa_analyze_node (struct cgraph_node *node)
1348 {
1349 struct ipa_node_params *info = IPA_NODE_REF (node);
1350 struct param_analysis_info *parms_info;
1351 int i, param_count;
1352
1353 ipa_initialize_node_params (node);
1354
1355 param_count = ipa_get_param_count (info);
1356 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1357 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1358
1359 ipa_analyze_params_uses (node, parms_info);
1360 ipa_compute_jump_functions (node, parms_info);
1361
1362 for (i = 0; i < param_count; i++)
1363 if (parms_info[i].visited_statements)
1364 BITMAP_FREE (parms_info[i].visited_statements);
1365 }
1366
1367
1368 /* Update the jump function DST when the call graph edge correspondng to SRC is
1369 is being inlined, knowing that DST is of type ancestor and src of known
1370 type. */
1371
1372 static void
1373 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1374 struct ipa_jump_func *dst)
1375 {
1376 tree new_binfo;
1377
1378 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1379 dst->value.ancestor.offset,
1380 dst->value.ancestor.type);
1381 if (new_binfo)
1382 {
1383 dst->type = IPA_JF_KNOWN_TYPE;
1384 dst->value.base_binfo = new_binfo;
1385 }
1386 else
1387 dst->type = IPA_JF_UNKNOWN;
1388 }
1389
1390 /* Update the jump functions associated with call graph edge E when the call
1391 graph edge CS is being inlined, assuming that E->caller is already (possibly
1392 indirectly) inlined into CS->callee and that E has not been inlined. */
1393
static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge. So make sure we
	     don't. */
	  if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  /* Compose DST with the jump function describing the argument
	     the inlined edge passed for DST's formal.  */
	  src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_CONST)
	    {
	      /* A constant may still yield a known type; derive it into a
		 temporary jump function and combine, otherwise give up.  */
	      struct ipa_jump_func kt_func;

	      kt_func.type = IPA_JF_UNKNOWN;
	      compute_known_type_jump_func (src->value.constant, &kt_func);
	      if (kt_func.type == IPA_JF_KNOWN_TYPE)
		combine_known_type_and_ancestor_jfs (&kt_func, dst);
	      else
		dst->type = IPA_JF_UNKNOWN;
	    }
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    /* A plain pass-through just redirects the formal id.  */
	    dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* Two consecutive ancestor steps compose by adding their
		 offsets.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations. */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      /* A simple pass-through is fully replaced by whatever the
		 inlined edge passed for that formal.  */
	      src = ipa_get_ith_jump_func (top,
					   dst->value.pass_through.formal_id);
	      *dst = *src;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
1463
1464 /* If TARGET is an addr_expr of a function declaration, make it the destination
1465 of an indirect edge IE and return the edge. Otherwise, return NULL. Delta,
1466 if non-NULL, is an integer constant that must be added to this pointer
1467 (first parameter). */
1468
struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, tree delta)
{
  struct cgraph_node *callee;

  /* Only a (possibly address-taken) FUNCTION_DECL can become a direct
     callee.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    return NULL;
  callee = cgraph_node (target);
  if (!callee)
    return NULL;
  ipa_check_create_node_params ();

  cgraph_make_edge_direct (ie, callee, delta);
  if (dump_file)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       cgraph_node_name (ie->caller), ie->caller->uid,
	       cgraph_node_name (ie->callee), ie->callee->uid);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	/* The gimple statement may be absent (e.g. when working from LTO
	   streamed data); fall back to its uid.  */
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);

      if (delta)
	{
	  fprintf (dump_file, " Thunk delta is ");
	  print_generic_expr (dump_file, delta, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* If the argument counts disagree, the callee is effectively called with
     a variable number of arguments and its per-parameter information
     cannot be relied upon.  */
  if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
      != ipa_get_param_count (IPA_NODE_REF (callee)))
    ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));

  return ie;
}
1510
1511 /* Try to find a destination for indirect edge IE that corresponds to a simple
1512 call or a call of a member function pointer and where the destination is a
1513 pointer formal parameter described by jump function JFUNC. If it can be
1514 determined, return the newly direct edge, otherwise return NULL. */
1515
1516 static struct cgraph_edge *
1517 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1518 struct ipa_jump_func *jfunc)
1519 {
1520 tree target;
1521
1522 if (jfunc->type == IPA_JF_CONST)
1523 target = jfunc->value.constant;
1524 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1525 target = jfunc->value.member_cst.pfn;
1526 else
1527 return NULL;
1528
1529 return ipa_make_edge_direct_to_target (ie, target, NULL_TREE);
1530 }
1531
1532 /* Try to find a destination for indirect edge IE that corresponds to a
   virtual call based on a formal parameter which is described by jump
1534 function JFUNC and if it can be determined, make it direct and return the
1535 direct edge. Otherwise, return NULL. */
1536
static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc)
{
  tree binfo, type, target, delta;
  HOST_WIDE_INT token;

  /* Determine the binfo describing the static type of the object the
     virtual call is made on.  */
  if (jfunc->type == IPA_JF_KNOWN_TYPE)
    binfo = jfunc->value.base_binfo;
  else if (jfunc->type == IPA_JF_CONST)
    {
      /* A constant address of an object also reveals its type.  */
      tree cst = jfunc->value.constant;
      if (TREE_CODE (cst) == ADDR_EXPR)
	binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (cst, 0),
					       NULL_TREE);
      else
	return NULL;
    }
  else
    return NULL;

  if (!binfo)
    return NULL;

  token = ie->indirect_info->otr_token;
  type = ie->indirect_info->otr_type;
  /* Descend to the binfo of the sub-object through which the call is
     actually made.  */
  binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
  if (binfo)
    /* NOTE(review): "mehtod" is the actual (misspelled) name of this
       function elsewhere in the tree at this revision; do not "fix" it
       here without renaming the definition as well.  */
    target = gimple_get_virt_mehtod_for_binfo (token, binfo, &delta, true);
  else
    return NULL;

  if (target)
    return ipa_make_edge_direct_to_target (ie, target, delta);
  else
    return NULL;
}
1574
1575 /* Update the param called notes associated with NODE when CS is being inlined,
1576 assuming NODE is (potentially indirectly) inlined into CS->callee.
1577 Moreover, if the callee is discovered to be constant, create a new cgraph
1578 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1579 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1580
static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      VEC (cgraph_edge_p, heap) **new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;

      /* Remember the successor now; IE may be turned into a direct edge
	 below and leave the indirect_calls list.  */
      next_ie = ie->next_callee;
      if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
	continue;

      /* If we ever use indirect edges for anything other than indirect
	 inlining, we will need to skip those with negative param_indices. */
      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  bitmap_set_bit (iinlining_processed_edges, ie->uid);
	  continue;
	}

      /* Translate the callee-relative parameter index through the jump
	 function of the inlined call.  */
      jfunc = ipa_get_ith_jump_func (top, ici->param_index);
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && jfunc->value.pass_through.operation == NOP_EXPR)
	ici->param_index = jfunc->value.pass_through.formal_id;
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  ici->param_index = jfunc->value.ancestor.formal_id;
	  ici->anc_offset += jfunc->value.ancestor.offset;
	}
      else
	/* Either we can find a destination for this edge now or never. */
	bitmap_set_bit (iinlining_processed_edges, ie->uid);

      if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);

      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      VEC_safe_push (cgraph_edge_p, heap, *new_edges,
			     new_direct_edge);
	      /* NOTE(review): TOP is presumably re-fetched here because
		 creating the direct edge may have resized
		 ipa_edge_args_vector and invalidated the old pointer —
		 confirm before relying on the cached value elsewhere.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
    }

  return res;
}
1647
1648 /* Recursively traverse subtree of NODE (including node) made of inlined
1649 cgraph_edges when CS has been inlined and invoke
1650 update_indirect_edges_after_inlining on all nodes and
1651 update_jump_functions_after_inlining on all non-inlined edges that lead out
1652 of this subtree. Newly discovered indirect edges will be added to
1653 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1654 created. */
1655
1656 static bool
1657 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1658 struct cgraph_node *node,
1659 VEC (cgraph_edge_p, heap) **new_edges)
1660 {
1661 struct cgraph_edge *e;
1662 bool res;
1663
1664 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1665
1666 for (e = node->callees; e; e = e->next_callee)
1667 if (!e->inline_failed)
1668 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1669 else
1670 update_jump_functions_after_inlining (cs, e);
1671
1672 return res;
1673 }
1674
1675 /* Update jump functions and call note functions on inlining the call site CS.
1676 CS is expected to lead to a node already cloned by
1677 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */
1680
1681 bool
1682 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1683 VEC (cgraph_edge_p, heap) **new_edges)
1684 {
1685 /* FIXME lto: We do not stream out indirect call information. */
1686 if (flag_wpa)
1687 return false;
1688
1689 /* Do nothing if the preparation phase has not been carried out yet
1690 (i.e. during early inlining). */
1691 if (!ipa_node_params_vector)
1692 return false;
1693 gcc_assert (ipa_edge_args_vector);
1694
1695 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1696 }
1697
1698 /* Frees all dynamically allocated structures that the argument info points
1699 to. */
1700
void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  /* The jump function array is GC-allocated; release it eagerly.  */
  if (args->jump_functions)
    ggc_free (args->jump_functions);

  /* Zero the structure so it can be safely reused.  */
  memset (args, 0, sizeof (*args));
}
1709
1710 /* Free all ipa_edge structures. */
1711
void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  /* Free the per-edge substructures first, then the vector itself.  */
  FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
  ipa_edge_args_vector = NULL;
}
1724
1725 /* Frees all dynamically allocated structures that the param info points
1726 to. */
1727
1728 void
1729 ipa_free_node_params_substructures (struct ipa_node_params *info)
1730 {
1731 if (info->params)
1732 free (info->params);
1733
1734 memset (info, 0, sizeof (*info));
1735 }
1736
1737 /* Free all ipa_node_params structures. */
1738
void
ipa_free_all_node_params (void)
{
  int i;
  struct ipa_node_params *info;

  /* Free the per-node substructures first, then the vector itself.  */
  FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
    ipa_free_node_params_substructures (info);

  VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
  ipa_node_params_vector = NULL;
}
1751
1752 /* Hook that is called by cgraph.c when an edge is removed. */
1753
static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones,
     for which no slot has been allocated in the vector yet.  */
  if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
      <= (unsigned)cs->uid)
    return;
  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
1763
1764 /* Hook that is called by cgraph.c when a node is removed. */
1765
static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones,
     for which no slot has been allocated in the vector yet.  */
  if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
      <= (unsigned)node->uid)
    return;
  ipa_free_node_params_substructures (IPA_NODE_REF (node));
}
1775
/* Helper function to duplicate an array of N bytes that is at SRC and
   return a pointer to the copy.  NULL is returned if SRC is NULL.  */
1778
static void *
duplicate_array (void *src, size_t n)
{
  void *copy;

  if (src == NULL)
    return NULL;

  /* xmalloc aborts on allocation failure, so COPY is always valid.  */
  copy = xmalloc (n);
  memcpy (copy, src, n);
  return copy;
}
1791
/* Return a GC-allocated copy of the N-element jump function array at SRC,
   or NULL if SRC is NULL.  */

static struct ipa_jump_func *
duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
{
  struct ipa_jump_func *p;

  if (!src)
    return NULL;

  p = ggc_alloc_vec_ipa_jump_func (n);
  memcpy (p, src, n * sizeof (struct ipa_jump_func));
  return p;
}
1804
/* Hook that is called by cgraph.c when an edge is duplicated.  */
1806
static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  int arg_count;

  /* Make sure the vector has a slot for DST.  */
  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  /* Deep-copy SRC's jump functions to DST.  */
  arg_count = ipa_get_cs_argument_count (old_args);
  ipa_set_cs_argument_count (new_args, arg_count);
  new_args->jump_functions =
    duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);

  /* An edge already handled by indirect inlining is also considered
     handled in its copy, so it is not processed again.  */
  if (iinlining_processed_edges
      && bitmap_bit_p (iinlining_processed_edges, src->uid))
    bitmap_set_bit (iinlining_processed_edges, dst->uid);
}
1828
1829 /* Hook that is called by cgraph.c when a node is duplicated. */
1830
static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_node_params *old_info, *new_info;
  int param_count, i;

  /* Make sure the vector has a slot for DST.  */
  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);
  param_count = ipa_get_param_count (old_info);

  ipa_set_param_count (new_info, param_count);
  /* Deep-copy the parameter descriptor array...  */
  new_info->params = (struct ipa_param_descriptor *)
    duplicate_array (old_info->params,
		     sizeof (struct ipa_param_descriptor) * param_count);
  /* ...including each descriptor's heap vector of types, which the plain
     byte copy above would otherwise leave shared between SRC and DST.  */
  for (i = 0; i < param_count; i++)
    new_info->params[i].types = VEC_copy (tree, heap,
					  old_info->params[i].types);
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->count_scale = old_info->count_scale;

  new_info->called_with_var_arguments = old_info->called_with_var_arguments;
  new_info->uses_analysis_done = old_info->uses_analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
}
1857
1858 /* Register our cgraph hooks if they are not already there. */
1859
1860 void
1861 ipa_register_cgraph_hooks (void)
1862 {
1863 if (!edge_removal_hook_holder)
1864 edge_removal_hook_holder =
1865 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1866 if (!node_removal_hook_holder)
1867 node_removal_hook_holder =
1868 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1869 if (!edge_duplication_hook_holder)
1870 edge_duplication_hook_holder =
1871 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
1872 if (!node_duplication_hook_holder)
1873 node_duplication_hook_holder =
1874 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
1875 }
1876
1877 /* Unregister our cgraph hooks if they are not already there. */
1878
static void
ipa_unregister_cgraph_hooks (void)
{
  /* Remove each hook and clear its holder so that a later call to
     ipa_register_cgraph_hooks installs them afresh.  */
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
}
1891
/* Allocate all data structures necessary for indirect inlining.  */
1893
void
ipa_create_all_structures_for_iinln (void)
{
  /* Bitmap of call graph edge uids already processed by indirect
     inlining; freed in ipa_free_all_structures_after_iinln.  */
  iinlining_processed_edges = BITMAP_ALLOC (NULL);
}
1899
1900 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1901 longer needed after ipa-cp. */
1902
1903 void
1904 ipa_free_all_structures_after_ipa_cp (void)
1905 {
1906 if (!flag_indirect_inlining)
1907 {
1908 ipa_free_all_edge_args ();
1909 ipa_free_all_node_params ();
1910 ipa_unregister_cgraph_hooks ();
1911 }
1912 }
1913
1914 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1915 longer needed after indirect inlining. */
1916
void
ipa_free_all_structures_after_iinln (void)
{
  /* Release the bitmap allocated in ipa_create_all_structures_for_iinln.  */
  BITMAP_FREE (iinlining_processed_edges);

  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
}
1926
/* Print the ipa_node_params data structure (parameter descriptors) of
   function NODE to F.  */
1929
void
ipa_print_node_params (FILE * f, struct cgraph_node *node)
{
  int i, count;
  tree temp;
  struct ipa_node_params *info;

  /* Only analyzed nodes have parameter descriptors.  */
  if (!node->analyzed)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s parameter descriptors:\n",
	   cgraph_node_name (node));
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      temp = ipa_get_param (info, i);
      if (TREE_CODE (temp) == PARM_DECL)
	fprintf (f, " param %d : %s", i,
		 (DECL_NAME (temp)
		  ? (*lang_hooks.decl_printable_name) (temp, 2)
		  : "(unnamed)"));
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      fprintf (f, "\n");
    }
}
1956
/* Print ipa_node_params data structures of all functions in the
   callgraph to F.  */
1959
void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  /* Walk the whole call graph; non-analyzed nodes are skipped by
     ipa_print_node_params itself.  */
  for (node = cgraph_nodes; node; node = node->next)
    ipa_print_node_params (f, node);
}
1969
1970 /* Return a heap allocated vector containing formal parameters of FNDECL. */
1971
VEC(tree, heap) *
ipa_get_vector_of_formal_parms (tree fndecl)
{
  VEC(tree, heap) *args;
  int count;
  tree parm;

  /* Pre-count the parameters so the vector can be allocated exactly and
     filled with the cheaper VEC_quick_push.  */
  count = count_formal_params_1 (fndecl);
  args = VEC_alloc (tree, heap, count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    VEC_quick_push (tree, args, parm);

  return args;
}
1986
1987 /* Return a heap allocated vector containing types of formal parameters of
1988 function type FNTYPE. */
1989
1990 static inline VEC(tree, heap) *
1991 get_vector_of_formal_parm_types (tree fntype)
1992 {
1993 VEC(tree, heap) *types;
1994 int count = 0;
1995 tree t;
1996
1997 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1998 count++;
1999
2000 types = VEC_alloc (tree, heap, count);
2001 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2002 VEC_quick_push (tree, types, TREE_VALUE (t));
2003
2004 return types;
2005 }
2006
2007 /* Modify the function declaration FNDECL and its type according to the plan in
2008 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2009 to reflect the actual parameters being modified which are determined by the
2010 base_index field. */
2011
void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  VEC(tree, heap) *oparms, *otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      /* A terminating void_type_node marks a non-variadic argument list.  */
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
      else
	gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
    }
  else
    {
      last_parm_void = false;
      otypes = NULL;
    }

  /* Rebuild DECL_ARGUMENTS and the (reversed, fixed up later) argument
     type list according to ADJUSTMENTS.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      parm = VEC_index (tree, oparms, adj->base_index);
      adj->base = parm;

      if (adj->copy_param)
	{
	  /* The original parameter is kept as-is.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
							     adj->base_index),
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  /* Synthesize a replacement parameter; a removed parameter
	     (adj->remove_param) is simply skipped.  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  add_referenced_var (new_parm);
	  mark_sym_for_renaming (new_parm);
	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &DECL_CHAIN (new_parm);
	}
    }

  /* Terminate the rebuilt DECL_ARGUMENTS chain.  */
  *link = NULL_TREE;

  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  /* Re-attach the terminating void to keep the list
	     non-variadic.  */
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
       || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
	 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      /* THIS was removed, so the function is no longer a method.  */
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  if (otypes)
    VEC_free (tree, heap, otypes);
  VEC_free (tree, heap, oparms);
}
2166
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  STMT is the call statement to
   be rewritten; it is replaced in place by a newly built GIMPLE call with
   the adjusted argument list, after which SSA form is updated and dominance
   info (invalidated by the inserted statements) is freed.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  VEC(tree, heap) *vargs;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  tree callee_decl;
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  vargs = VEC_alloc (tree, heap, len);

  gsi = gsi_for_stmt (stmt);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->copy_param)
	{
	  /* Parameter kept as-is: forward the original actual argument.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  VEC_quick_push (tree, vargs, arg);
	}
      else if (!adj->remove_param)
	{
	  tree expr, base, off;
	  location_t loc;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  /* adj->offset is in bits; the MEM_REF offsets built below are in
	     bytes, hence the divisibility requirement.  */
	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* The original argument is already a (non-ADDR_EXPR) pointer;
	       the adjustment offset becomes the MEM_REF offset directly.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;

	      if (TREE_CODE (base) == ADDR_EXPR)
		base = TREE_OPERAND (base, 0);
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  /* Fold the pre-existing MEM_REF offset into the new one
		     and strip the MEM_REF wrapper.  */
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off, 0);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	  if (adj->by_ref)
	    expr = build_fold_addr_expr (expr);

	  /* Gimplify the new argument before the call; it must be a plain
	     SSA value for registers or an addressable lvalue otherwise.  */
	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  VEC_quick_push (tree, vargs, expr);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  /* For a directly recursive call (CS == NULL) reuse the statement's own
     callee; otherwise take it from the call graph edge.  */
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  /* Carry over location, block, flags and static chain of the old call.  */
  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
2305
2306 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2307
2308 static bool
2309 index_in_adjustments_multiple_times_p (int base_index,
2310 ipa_parm_adjustment_vec adjustments)
2311 {
2312 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2313 bool one = false;
2314
2315 for (i = 0; i < len; i++)
2316 {
2317 struct ipa_parm_adjustment *adj;
2318 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2319
2320 if (adj->base_index == base_index)
2321 {
2322 if (one)
2323 return true;
2324 else
2325 one = true;
2326 }
2327 }
2328 return false;
2329 }
2330
2331
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  The returned vector is freshly
   heap-allocated; the caller owns it.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
  int inlen = VEC_length (ipa_parm_adjustment_t, inner);
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* Build TMP, a copy of INNER without the removed entries.  OUTER's
     base_index values refer to positions after INNER was applied, i.e. to
     positions in this filtered vector.  */
  tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = VEC_index (ipa_parm_adjustment_t, inner, i);

      if (n->remove_param)
	removals++;
      else
	VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
    }

  /* Result holds at most one entry per OUTER element plus the removals
     dropped from INNER, re-added at the end.  */
  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment *r;
      struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
						   outer, i);
      struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
						  out->base_index);

      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* Emit an explicit removal only for the sole reference to the
	     inner parameter; if it occurs multiple times, the other
	     references keep it alive.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (r, 0, sizeof (*r));
	      r->remove_param = true;
	    }
	  continue;
	}

      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
      memset (r, 0, sizeof (*r));
      /* The combined adjustment refers back to the ORIGINAL parameter
	 index, and takes its type from the outermost transformation.  */
      r->base_index = in->base_index;
      r->type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      /* Offsets compose additively; a plain copy contributes zero.  */
      if (in->copy_param && out->copy_param)
	r->copy_param = true;
      else if (in->copy_param)
	r->offset = out->offset;
      else if (out->copy_param)
	r->offset = in->offset;
      else
	r->offset = in->offset + out->offset;
    }

  /* Re-add the removals filtered out of TMP above so every original
     parameter is accounted for in the result.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
						 inner, i);

      if (n->remove_param)
	VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
    }

  VEC_free (ipa_parm_adjustment_t, heap, tmp);
  return adjustments;
}
2407
2408 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2409 friendly way, assuming they are meant to be applied to FNDECL. */
2410
2411 void
2412 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2413 tree fndecl)
2414 {
2415 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2416 bool first = true;
2417 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2418
2419 fprintf (file, "IPA param adjustments: ");
2420 for (i = 0; i < len; i++)
2421 {
2422 struct ipa_parm_adjustment *adj;
2423 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2424
2425 if (!first)
2426 fprintf (file, " ");
2427 else
2428 first = false;
2429
2430 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2431 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2432 if (adj->base)
2433 {
2434 fprintf (file, ", base: ");
2435 print_generic_expr (file, adj->base, 0);
2436 }
2437 if (adj->reduction)
2438 {
2439 fprintf (file, ", reduction: ");
2440 print_generic_expr (file, adj->reduction, 0);
2441 }
2442 if (adj->new_ssa_base)
2443 {
2444 fprintf (file, ", new_ssa_base: ");
2445 print_generic_expr (file, adj->new_ssa_base, 0);
2446 }
2447
2448 if (adj->copy_param)
2449 fprintf (file, ", copy_param");
2450 else if (adj->remove_param)
2451 fprintf (file, ", remove_param");
2452 else
2453 fprintf (file, ", offset %li", (long) adj->offset);
2454 if (adj->by_ref)
2455 fprintf (file, ", by_ref");
2456 print_node_brief (file, ", type: ", adj->type, 0);
2457 fprintf (file, "\n");
2458 }
2459 VEC_free (tree, heap, parms);
2460 }
2461
/* Stream out jump function JUMP_FUNC to OB.  The type tag is written first,
   followed by a per-kind payload; ipa_read_jump_function must read the same
   fields in the same order.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  lto_output_uleb128_stream (ob->main_stream,
			     jump_func->type);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      lto_output_tree (ob, jump_func->value.base_binfo, true);
      break;
    case IPA_JF_CONST:
      lto_output_tree (ob, jump_func->value.constant, true);
      break;
    case IPA_JF_PASS_THROUGH:
      /* Operand tree, then the formal's index and the operation code.  */
      lto_output_tree (ob, jump_func->value.pass_through.operand, true);
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.pass_through.formal_id);
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.pass_through.operation);
      break;
    case IPA_JF_ANCESTOR:
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.ancestor.offset);
      lto_output_tree (ob, jump_func->value.ancestor.type, true);
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.ancestor.formal_id);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
      lto_output_tree (ob, jump_func->value.member_cst.delta, false);
      break;
    }
}
2501
/* Read in jump function JUMP_FUNC from IB.  DATA_IN provides the tree
   streaming context.  Mirror of ipa_write_jump_function: the type tag is
   read first and selects the per-kind payload format.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct data_in *data_in)
{
  jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      jump_func->value.base_binfo = lto_input_tree (ib, data_in);
      break;
    case IPA_JF_CONST:
      jump_func->value.constant = lto_input_tree (ib, data_in);
      break;
    case IPA_JF_PASS_THROUGH:
      jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
      jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
      jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
      break;
    case IPA_JF_ANCESTOR:
      jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
      jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
      jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
      jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
      break;
    }
}
2537
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Counterpart of
   ipa_read_indirect_edge_info, which must read the fields in the same
   order.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  lto_output_sleb128_stream (ob->main_stream, ii->param_index);
  lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  lto_output_bitpack (&bp);

  /* The virtual-call data (OBJ_TYPE_REF token, type and thunk delta) is
     only present for polymorphic calls.  */
  if (ii->polymorphic)
    {
      lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
      lto_output_tree (ob, ii->otr_type, true);
      lto_output_tree (ob, ii->thunk_delta, true);
    }
}
2561
2562 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2563 relevant to indirect inlining from IB. */
2564
2565 static void
2566 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2567 struct data_in *data_in ATTRIBUTE_UNUSED,
2568 struct cgraph_edge *cs)
2569 {
2570 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2571 struct bitpack_d bp;
2572
2573 ii->param_index = (int) lto_input_sleb128 (ib);
2574 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2575 bp = lto_input_bitpack (ib);
2576 ii->polymorphic = bp_unpack_value (&bp, 1);
2577 if (ii->polymorphic)
2578 {
2579 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2580 ii->otr_type = lto_input_tree (ib, data_in);
2581 ii->thunk_delta = lto_input_tree (ib, data_in);
2582 }
2583 }
2584
/* Stream out NODE info to OB: a node reference, a bitpack of flags and
   per-parameter 'used' bits, one jump-function list per direct callee edge,
   and indirect-edge info for every indirect call.  The layout must be
   mirrored exactly by ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_cgraph_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->cgraph_node_encoder;
  node_ref = lto_cgraph_encoder_encode (encoder, node);
  lto_output_uleb128_stream (ob->main_stream, node_ref);

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, info->called_with_var_arguments, 1);
  /* Analysis must be complete (or trivially empty) before streaming, and
     the node must not be in an intermediate IPA-CP state.  */
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, info->params[j].used, 1);
  lto_output_bitpack (&bp);
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Argument count first, then one jump function per argument.  */
      lto_output_uleb128_stream (ob->main_stream,
				 ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    ipa_write_indirect_edge_info (ob, e);
}
2622
/* Stream in NODE info from IB.  DATA_IN provides the tree streaming
   context.  Mirror of ipa_write_node_info; the fields must be read in the
   same order and with the same encodings they were written.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_initialize_node_params (node);

  bp = lto_input_bitpack (ib);
  info->called_with_var_arguments = bp_unpack_value (&bp, 1);
  /* The writer asserts uses_analysis_done whenever there are parameters,
     so restore the flag accordingly.  */
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    info->params[k].used = bp_unpack_value (&bp, 1);
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = lto_input_uleb128 (ib);

      ipa_set_cs_argument_count (args, count);
      if (!count)
	continue;

      /* Allocate the jump function array in GC memory, then fill it.  */
      args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
	(ipa_get_cs_argument_count (args));
      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    ipa_read_indirect_edge_info (ib, data_in, e);
}
2660
2661 /* Write jump functions for nodes in SET. */
2662
2663 void
2664 ipa_prop_write_jump_functions (cgraph_node_set set)
2665 {
2666 struct cgraph_node *node;
2667 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2668 unsigned int count = 0;
2669 cgraph_node_set_iterator csi;
2670
2671 ob->cgraph_node = NULL;
2672
2673 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2674 {
2675 node = csi_node (csi);
2676 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2677 count++;
2678 }
2679
2680 lto_output_uleb128_stream (ob->main_stream, count);
2681
2682 /* Process all of the functions. */
2683 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2684 {
2685 node = csi_node (csi);
2686 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2687 ipa_write_node_info (ob, node);
2688 }
2689 lto_output_1_stream (ob->main_stream, 0);
2690 produce_asm (ob, NULL);
2691 destroy_output_block (ob);
2692 }
2693
/* Read section in file FILE_DATA of length LEN with data DATA.  The data
   was produced by ipa_prop_write_jump_functions: a record count followed
   by per-node records, each starting with an encoded node index.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* The section layout is header, CFG, main stream, string table; compute
     the offset of each from the sizes recorded in the header.  */
  const int32_t cfg_offset = sizeof (struct lto_function_header);
  const int32_t main_offset = cfg_offset + header->cfg_size;
  const int32_t string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  count = lto_input_uleb128 (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_cgraph_encoder_t encoder;

      /* Map the streamed index back to the cgraph node it encodes.  */
      index = lto_input_uleb128 (&ib_main);
      encoder = file_data->cgraph_node_encoder;
      node = lto_cgraph_encoder_deref (encoder, index);
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
2734
2735 /* Read ipcp jump functions. */
2736
2737 void
2738 ipa_prop_read_jump_functions (void)
2739 {
2740 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2741 struct lto_file_decl_data *file_data;
2742 unsigned int j = 0;
2743
2744 ipa_check_create_node_params ();
2745 ipa_check_create_edge_args ();
2746 ipa_register_cgraph_hooks ();
2747
2748 while ((file_data = file_data_vec[j++]))
2749 {
2750 size_t len;
2751 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2752
2753 if (data)
2754 ipa_prop_read_section (file_data, data, len);
2755 }
2756 }
2757
2758 /* After merging units, we can get mismatch in argument counts.
2759 Also decl merging might've rendered parameter lists obsolette.
2760 Also compute called_with_variable_arg info. */
2761
2762 void
2763 ipa_update_after_lto_read (void)
2764 {
2765 struct cgraph_node *node;
2766 struct cgraph_edge *cs;
2767
2768 ipa_check_create_node_params ();
2769 ipa_check_create_edge_args ();
2770
2771 for (node = cgraph_nodes; node; node = node->next)
2772 if (node->analyzed)
2773 ipa_initialize_node_params (node);
2774
2775 for (node = cgraph_nodes; node; node = node->next)
2776 if (node->analyzed)
2777 for (cs = node->callees; cs; cs = cs->next_callee)
2778 {
2779 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2780 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2781 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
2782 }
2783 }