ipa-cp.c (ipcp_update_callgraph): Use ipa_is_param_used.
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "lto-streamer.h"
39
40 /* Vector where the parameter infos are actually stored. */
41 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
42 /* Vector where the parameter infos are actually stored. */
43 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
44
45 /* Bitmap with all UIDs of call graph edges that have been already processed
46 by indirect inlining. */
47 static bitmap iinlining_processed_edges;
48
49 /* Holders of ipa cgraph hooks: */
50 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
51 static struct cgraph_node_hook_list *node_removal_hook_holder;
52 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
53 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
54
55 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
56 it is in one or not. It should almost never be used directly, as opposed to
57 ipa_push_func_to_list. */
58
59 void
60 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
61 struct cgraph_node *node,
62 struct ipa_node_params *info)
63 {
64 struct ipa_func_list *temp;
65
66 info->node_enqueued = 1;
67 temp = XCNEW (struct ipa_func_list);
68 temp->node = node;
69 temp->next = *wl;
70 *wl = temp;
71 }
72
73 /* Initialize worklist to contain all functions. */
74
75 struct ipa_func_list *
76 ipa_init_func_list (void)
77 {
78 struct cgraph_node *node;
79 struct ipa_func_list * wl;
80
81 wl = NULL;
82 for (node = cgraph_nodes; node; node = node->next)
83 if (node->analyzed)
84 {
85 struct ipa_node_params *info = IPA_NODE_REF (node);
86 /* Unreachable nodes should have been eliminated before ipcp and
87 inlining. */
88 gcc_assert (node->needed || node->reachable);
89 ipa_push_func_to_list_1 (&wl, node, info);
90 }
91
92 return wl;
93 }
94
95 /* Remove a function from the worklist WL and return it. */
96
97 struct cgraph_node *
98 ipa_pop_func_from_list (struct ipa_func_list **wl)
99 {
100 struct ipa_node_params *info;
101 struct ipa_func_list *first;
102 struct cgraph_node *node;
103
104 first = *wl;
105 *wl = (*wl)->next;
106 node = first->node;
107 free (first);
108
109 info = IPA_NODE_REF (node);
110 info->node_enqueued = 0;
111 return node;
112 }
113
114 /* Return index of the formal whose tree is PTREE in function which corresponds
115 to INFO. */
116
117 static int
118 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
119 {
120 int i, count;
121
122 count = ipa_get_param_count (info);
123 for (i = 0; i < count; i++)
124 if (ipa_get_param(info, i) == ptree)
125 return i;
126
127 return -1;
128 }
129
130 /* Populate the param_decl field in parameter descriptors of INFO that
131 corresponds to NODE. */
132
133 static void
134 ipa_populate_param_decls (struct cgraph_node *node,
135 struct ipa_node_params *info)
136 {
137 tree fndecl;
138 tree fnargs;
139 tree parm;
140 int param_num;
141
142 fndecl = node->decl;
143 fnargs = DECL_ARGUMENTS (fndecl);
144 param_num = 0;
145 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
146 {
147 info->params[param_num].decl = parm;
148 param_num++;
149 }
150 }
151
152 /* Return how many formal parameters FNDECL has. */
153
154 static inline int
155 count_formal_params_1 (tree fndecl)
156 {
157 tree parm;
158 int count = 0;
159
160 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = TREE_CHAIN (parm))
161 count++;
162
163 return count;
164 }
165
166 /* Count number of formal parameters in NOTE. Store the result to the
167 appropriate field of INFO. */
168
169 static void
170 ipa_count_formal_params (struct cgraph_node *node,
171 struct ipa_node_params *info)
172 {
173 int param_num;
174
175 param_num = count_formal_params_1 (node->decl);
176 ipa_set_param_count (info, param_num);
177 }
178
179 /* Initialize the ipa_node_params structure associated with NODE by counting
180 the function parameters, creating the descriptors and populating their
181 param_decls. */
182
183 void
184 ipa_initialize_node_params (struct cgraph_node *node)
185 {
186 struct ipa_node_params *info = IPA_NODE_REF (node);
187
188 if (!info->params)
189 {
190 ipa_count_formal_params (node, info);
191 info->params = XCNEWVEC (struct ipa_param_descriptor,
192 ipa_get_param_count (info));
193 ipa_populate_param_decls (node, info);
194 }
195 }
196
197 /* Callback of walk_stmt_load_store_addr_ops for the visit_store and visit_addr
198 parameters. If OP is a parameter declaration, mark it as modified in the
199 info structure passed in DATA. */
200
201 static bool
202 visit_store_addr_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
203 tree op, void *data)
204 {
205 struct ipa_node_params *info = (struct ipa_node_params *) data;
206
207 op = get_base_address (op);
208 if (op
209 && TREE_CODE (op) == PARM_DECL)
210 {
211 int index = ipa_get_param_decl_index (info, op);
212 gcc_assert (index >= 0);
213 info->params[index].modified = true;
214 info->params[index].used = true;
215 }
216
217 return false;
218 }
219
220 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
221 If OP is a parameter declaration, mark it as used in the info structure
222 passed in DATA. */
223
224 static bool
225 visit_load_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
226 tree op, void *data)
227 {
228 struct ipa_node_params *info = (struct ipa_node_params *) data;
229
230 op = get_base_address (op);
231 if (op
232 && TREE_CODE (op) == PARM_DECL)
233 {
234 int index = ipa_get_param_decl_index (info, op);
235 gcc_assert (index >= 0);
236 info->params[index].used = true;
237 }
238
239 return false;
240 }
241
242 /* Compute which formal parameters of function associated with NODE are locally
243 modified or their address is taken. Note that this does not apply on
244 parameters with SSA names but those can and should be analyzed
245 differently. */
246
247 void
248 ipa_detect_param_modifications (struct cgraph_node *node)
249 {
250 tree decl = node->decl;
251 basic_block bb;
252 struct function *func;
253 gimple_stmt_iterator gsi;
254 struct ipa_node_params *info = IPA_NODE_REF (node);
255 int i;
256
257 if (ipa_get_param_count (info) == 0 || info->modification_analysis_done)
258 return;
259
260 for (i = 0; i < ipa_get_param_count (info); i++)
261 {
262 tree parm = ipa_get_param (info, i);
263 /* For SSA regs see if parameter is used. For non-SSA we compute
264 the flag during modification analysis. */
265 if (is_gimple_reg (parm)
266 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
267 info->params[i].used = true;
268 }
269
270 func = DECL_STRUCT_FUNCTION (decl);
271 FOR_EACH_BB_FN (bb, func)
272 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
273 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
274 visit_load_for_mod_analysis,
275 visit_store_addr_for_mod_analysis,
276 visit_store_addr_for_mod_analysis);
277
278 info->modification_analysis_done = 1;
279 }
280
281 /* Count number of arguments callsite CS has and store it in
282 ipa_edge_args structure corresponding to this callsite. */
283
284 void
285 ipa_count_arguments (struct cgraph_edge *cs)
286 {
287 gimple stmt;
288 int arg_num;
289
290 stmt = cs->call_stmt;
291 gcc_assert (is_gimple_call (stmt));
292 arg_num = gimple_call_num_args (stmt);
293 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
294 <= (unsigned) cgraph_edge_max_uid)
295 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
296 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
297 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
298 }
299
300 /* Print the jump functions of all arguments on all call graph edges going from
301 NODE to file F. */
302
303 void
304 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
305 {
306 int i, count;
307 struct cgraph_edge *cs;
308 struct ipa_jump_func *jump_func;
309 enum jump_func_type type;
310
311 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
312 for (cs = node->callees; cs; cs = cs->next_callee)
313 {
314 if (!ipa_edge_args_info_available_for_edge_p (cs))
315 continue;
316
317 fprintf (f, " callsite %s ", cgraph_node_name (node));
318 fprintf (f, "-> %s :: \n", cgraph_node_name (cs->callee));
319
320 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
321 for (i = 0; i < count; i++)
322 {
323 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
324 type = jump_func->type;
325
326 fprintf (f, " param %d: ", i);
327 if (type == IPA_JF_UNKNOWN)
328 fprintf (f, "UNKNOWN\n");
329 else if (type == IPA_JF_KNOWN_TYPE)
330 {
331 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
332 fprintf (f, "KNOWN TYPE, type in binfo is: ");
333 print_generic_expr (f, binfo_type, 0);
334 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
335 }
336 else if (type == IPA_JF_CONST)
337 {
338 tree val = jump_func->value.constant;
339 fprintf (f, "CONST: ");
340 print_generic_expr (f, val, 0);
341 if (TREE_CODE (val) == ADDR_EXPR
342 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
343 {
344 fprintf (f, " -> ");
345 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
346 0);
347 }
348 fprintf (f, "\n");
349 }
350 else if (type == IPA_JF_CONST_MEMBER_PTR)
351 {
352 fprintf (f, "CONST MEMBER PTR: ");
353 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
354 fprintf (f, ", ");
355 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
356 fprintf (f, "\n");
357 }
358 else if (type == IPA_JF_PASS_THROUGH)
359 {
360 fprintf (f, "PASS THROUGH: ");
361 fprintf (f, "%d, op %s ",
362 jump_func->value.pass_through.formal_id,
363 tree_code_name[(int)
364 jump_func->value.pass_through.operation]);
365 if (jump_func->value.pass_through.operation != NOP_EXPR)
366 print_generic_expr (dump_file,
367 jump_func->value.pass_through.operand, 0);
368 fprintf (dump_file, "\n");
369 }
370 else if (type == IPA_JF_ANCESTOR)
371 {
372 fprintf (f, "ANCESTOR: ");
373 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
374 jump_func->value.ancestor.formal_id,
375 jump_func->value.ancestor.offset);
376 print_generic_expr (f, jump_func->value.ancestor.type, 0);
377 fprintf (dump_file, "\n");
378 }
379 }
380 }
381 }
382
383 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
384
385 void
386 ipa_print_all_jump_functions (FILE *f)
387 {
388 struct cgraph_node *node;
389
390 fprintf (f, "\nJump functions:\n");
391 for (node = cgraph_nodes; node; node = node->next)
392 {
393 ipa_print_node_jump_functions (f, node);
394 }
395 }
396
397 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
398 of an assignment statement STMT, try to find out whether NAME can be
399 described by a (possibly polynomial) pass-through jump-function or an
400 ancestor jump function and if so, write the appropriate function into
401 JFUNC */
402
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, op2, type;
  int index;

  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  /* Case 1: the first operand is directly the default definition of a
     parameter, i.e. the value the caller passed in.  */
  if (TREE_CODE (op1) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (op1))
    {
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      if (index < 0)
	return;

      if (op2)
	{
	  /* Binary operation: PARAM <op> invariant.  The second operand must
	     be an interprocedural invariant, and unless the operation is a
	     comparison, the result type must match the parameter type so the
	     operation means the same thing in the callee.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
	  jfunc->value.pass_through.operand = op2;
	}
      else if (gimple_assign_unary_nop_p (stmt))
	{
	  /* A no-op copy/conversion of the parameter: plain pass-through.  */
	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = NOP_EXPR;
	}
      return;
    }

  /* Case 2: try to match taking the address of a field within what a
     parameter points to, i.e. &(*PARAM).field — an ancestor jump function
     describing a constant offset from the parameter.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;

  op1 = TREE_OPERAND (op1, 0);
  type = TREE_TYPE (op1);
  if (TREE_CODE (type) != RECORD_TYPE)
    return;
  op1 = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (op1) != INDIRECT_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  op1 = TREE_OPERAND (op1, 0);
  /* The dereferenced pointer must itself be the incoming value of a formal
     parameter.  */
  if (TREE_CODE (op1) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (op1))
    return;

  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
  if (index >= 0)
    {
      jfunc->type = IPA_JF_ANCESTOR;
      jfunc->value.ancestor.formal_id = index;
      jfunc->value.ancestor.offset = offset;
      jfunc->value.ancestor.type = type;
    }
}
471
472
473 /* Given that an actual argument is an SSA_NAME that is a result of a phi
474 statement PHI, try to find out whether NAME is in fact a
475 multiple-inheritance typecast from a descendant into an ancestor of a formal
476 parameter and thus can be described by an ancestor jump function and if so,
477 write the appropriate function into JFUNC.
478
479 Essentially we want to match the following pattern:
480
481 if (obj_2(D) != 0B)
482 goto <bb 3>;
483 else
484 goto <bb 4>;
485
486 <bb 3>:
487 iftmp.1_3 = &obj_2(D)->D.1762;
488
489 <bb 4>:
490 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
491 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
492 return D.1879_6; */
493
static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple phi)
{
  HOST_WIDE_INT offset, size, max_size;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr;
  int index, i;

  /* The PHI must merge exactly two values, the second being the null
     pointer coming from the "pointer is NULL" arm of the pattern.  */
  if (gimple_phi_num_args (phi) != 2
      || !integer_zerop (PHI_ARG_DEF (phi, 1)))
    return;

  /* The first PHI argument must be a defined SSA name of pointer-to-record
     type — the candidate "&obj->ancestor_field" temporary.  */
  tmp = PHI_ARG_DEF (phi, 0);
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* It must be produced by a single assignment in a block with exactly one
     predecessor (which must hold the guarding condition, checked below).  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb)
      || !gimple_assign_single_p (assign))
    return;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return;
  expr = TREE_OPERAND (expr, 0);
  expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (TREE_CODE (expr) != INDIRECT_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  /* The dereferenced pointer must be the incoming value of a formal
     parameter.  */
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm))
    return;

  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The predecessor block must end with "if (parm != 0)", the null check
     guarding the typecast.  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;


  /* Both PHI predecessors must come from the matched diamond: the assign
     block and the condition block — no other paths may reach the merge.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = index;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = TREE_TYPE (TREE_TYPE (tmp));
}
564
565 /* Given OP whch is passed as an actual argument to a called function,
566 determine if it is possible to construct a KNOWN_TYPE jump function for it
567 and if so, create one and store it to JFUNC. */
568
569 static void
570 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc)
571 {
572 tree binfo;
573
574 if (TREE_CODE (op) != ADDR_EXPR)
575 return;
576
577 op = TREE_OPERAND (op, 0);
578 binfo = gimple_get_relevant_ref_binfo (op, NULL_TREE);
579 if (binfo)
580 {
581 jfunc->type = IPA_JF_KNOWN_TYPE;
582 jfunc->value.base_binfo = binfo;
583 }
584 }
585
586
587 /* Determine the jump functions of scalar arguments. Scalar means SSA names
588 and constants of a number of selected types. INFO is the ipa_node_params
589 structure associated with the caller, FUNCTIONS is a pointer to an array of
590 jump function structures associated with CALL which is the call statement
591 being examined.*/
592
593 static void
594 compute_scalar_jump_functions (struct ipa_node_params *info,
595 struct ipa_jump_func *functions,
596 gimple call)
597 {
598 tree arg;
599 unsigned num = 0;
600
601 for (num = 0; num < gimple_call_num_args (call); num++)
602 {
603 arg = gimple_call_arg (call, num);
604
605 if (is_gimple_ip_invariant (arg))
606 {
607 functions[num].type = IPA_JF_CONST;
608 functions[num].value.constant = arg;
609 }
610 else if (TREE_CODE (arg) == SSA_NAME)
611 {
612 if (SSA_NAME_IS_DEFAULT_DEF (arg))
613 {
614 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
615
616 if (index >= 0)
617 {
618 functions[num].type = IPA_JF_PASS_THROUGH;
619 functions[num].value.pass_through.formal_id = index;
620 functions[num].value.pass_through.operation = NOP_EXPR;
621 }
622 }
623 else
624 {
625 gimple stmt = SSA_NAME_DEF_STMT (arg);
626 if (is_gimple_assign (stmt))
627 compute_complex_assign_jump_func (info, &functions[num],
628 stmt, arg);
629 else if (gimple_code (stmt) == GIMPLE_PHI)
630 compute_complex_ancestor_jump_func (info, &functions[num],
631 stmt);
632 }
633 }
634 else
635 compute_known_type_jump_func (arg, &functions[num]);
636 }
637 }
638
639 /* Inspect the given TYPE and return true iff it has the same structure (the
640 same number of fields of the same types) as a C++ member pointer. If
641 METHOD_PTR and DELTA are non-NULL, store the trees representing the
642 corresponding fields there. */
643
644 static bool
645 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
646 {
647 tree fld;
648
649 if (TREE_CODE (type) != RECORD_TYPE)
650 return false;
651
652 fld = TYPE_FIELDS (type);
653 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
654 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
655 return false;
656
657 if (method_ptr)
658 *method_ptr = fld;
659
660 fld = TREE_CHAIN (fld);
661 if (!fld || INTEGRAL_TYPE_P (fld))
662 return false;
663 if (delta)
664 *delta = fld;
665
666 if (TREE_CHAIN (fld))
667 return false;
668
669 return true;
670 }
671
672 /* Go through arguments of the CALL and for every one that looks like a member
673 pointer, check whether it can be safely declared pass-through and if so,
674 mark that to the corresponding item of jump FUNCTIONS. Return true iff
675 there are non-pass-through member pointers within the arguments. INFO
676 describes formal parameters of the caller. */
677
678 static bool
679 compute_pass_through_member_ptrs (struct ipa_node_params *info,
680 struct ipa_jump_func *functions,
681 gimple call)
682 {
683 bool undecided_members = false;
684 unsigned num;
685 tree arg;
686
687 for (num = 0; num < gimple_call_num_args (call); num++)
688 {
689 arg = gimple_call_arg (call, num);
690
691 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
692 {
693 if (TREE_CODE (arg) == PARM_DECL)
694 {
695 int index = ipa_get_param_decl_index (info, arg);
696
697 gcc_assert (index >=0);
698 if (!ipa_is_param_modified (info, index))
699 {
700 functions[num].type = IPA_JF_PASS_THROUGH;
701 functions[num].value.pass_through.formal_id = index;
702 functions[num].value.pass_through.operation = NOP_EXPR;
703 }
704 else
705 undecided_members = true;
706 }
707 else
708 undecided_members = true;
709 }
710 }
711
712 return undecided_members;
713 }
714
715 /* Simple function filling in a member pointer constant jump function (with PFN
716 and DELTA as the constant value) into JFUNC. */
717
718 static void
719 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
720 tree pfn, tree delta)
721 {
722 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
723 jfunc->value.member_cst.pfn = pfn;
724 jfunc->value.member_cst.delta = delta;
725 }
726
727 /* If RHS is an SSA_NAMe and it is defined by a simple copy assign statement,
728 return the rhs of its defining statement. */
729
730 static inline tree
731 get_ssa_def_if_simple_copy (tree rhs)
732 {
733 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
734 {
735 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
736
737 if (gimple_assign_single_p (def_stmt))
738 rhs = gimple_assign_rhs1 (def_stmt);
739 else
740 break;
741 }
742 return rhs;
743 }
744
745 /* Traverse statements from CALL backwards, scanning whether the argument ARG
746 which is a member pointer is filled in with constant values. If it is, fill
747 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
748 fields of the record type of the member pointer. To give an example, we
749 look for a pattern looking like the following:
750
751 D.2515.__pfn ={v} printStuff;
752 D.2515.__delta ={v} 0;
753 i_1 = doprinting (D.2515); */
754
755 static void
756 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
757 tree delta_field, struct ipa_jump_func *jfunc)
758 {
759 gimple_stmt_iterator gsi;
760 tree method = NULL_TREE;
761 tree delta = NULL_TREE;
762
763 gsi = gsi_for_stmt (call);
764
765 gsi_prev (&gsi);
766 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
767 {
768 gimple stmt = gsi_stmt (gsi);
769 tree lhs, rhs, fld;
770
771 if (!gimple_assign_single_p (stmt))
772 return;
773
774 lhs = gimple_assign_lhs (stmt);
775 rhs = gimple_assign_rhs1 (stmt);
776
777 if (TREE_CODE (lhs) != COMPONENT_REF
778 || TREE_OPERAND (lhs, 0) != arg)
779 continue;
780
781 fld = TREE_OPERAND (lhs, 1);
782 if (!method && fld == method_field)
783 {
784 rhs = get_ssa_def_if_simple_copy (rhs);
785 if (TREE_CODE (rhs) == ADDR_EXPR
786 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
787 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
788 {
789 method = TREE_OPERAND (rhs, 0);
790 if (delta)
791 {
792 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
793 return;
794 }
795 }
796 else
797 return;
798 }
799
800 if (!delta && fld == delta_field)
801 {
802 rhs = get_ssa_def_if_simple_copy (rhs);
803 if (TREE_CODE (rhs) == INTEGER_CST)
804 {
805 delta = rhs;
806 if (method)
807 {
808 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
809 return;
810 }
811 }
812 else
813 return;
814 }
815 }
816
817 return;
818 }
819
820 /* Go through the arguments of the CALL and for every member pointer within
821 tries determine whether it is a constant. If it is, create a corresponding
822 constant jump function in FUNCTIONS which is an array of jump functions
823 associated with the call. */
824
825 static void
826 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
827 gimple call)
828 {
829 unsigned num;
830 tree arg, method_field, delta_field;
831
832 for (num = 0; num < gimple_call_num_args (call); num++)
833 {
834 arg = gimple_call_arg (call, num);
835
836 if (functions[num].type == IPA_JF_UNKNOWN
837 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
838 &delta_field))
839 determine_cst_member_ptr (call, arg, method_field, delta_field,
840 &functions[num]);
841 }
842 }
843
844 /* Compute jump function for all arguments of callsite CS and insert the
845 information in the jump_functions array in the ipa_edge_args corresponding
846 to this callsite. */
847
848 void
849 ipa_compute_jump_functions (struct cgraph_edge *cs)
850 {
851 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
852 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
853 gimple call;
854
855 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
856 return;
857 arguments->jump_functions = GGC_CNEWVEC (struct ipa_jump_func,
858 ipa_get_cs_argument_count (arguments));
859
860 call = cs->call_stmt;
861 gcc_assert (is_gimple_call (call));
862
863 /* We will deal with constants and SSA scalars first: */
864 compute_scalar_jump_functions (info, arguments->jump_functions, call);
865
866 /* Let's check whether there are any potential member pointers and if so,
867 whether we can determine their functions as pass_through. */
868 if (!compute_pass_through_member_ptrs (info, arguments->jump_functions, call))
869 return;
870
871 /* Finally, let's check whether we actually pass a new constant member
872 pointer here... */
873 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
874 }
875
876 /* If RHS looks like a rhs of a statement loading pfn from a member
877 pointer formal parameter, return the parameter, otherwise return
878 NULL. If USE_DELTA, then we look for a use of the delta field
879 rather than the pfn. */
880
881 static tree
882 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
883 {
884 tree rec, fld;
885 tree ptr_field;
886 tree delta_field;
887
888 if (TREE_CODE (rhs) != COMPONENT_REF)
889 return NULL_TREE;
890
891 rec = TREE_OPERAND (rhs, 0);
892 if (TREE_CODE (rec) != PARM_DECL
893 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
894 return NULL_TREE;
895
896 fld = TREE_OPERAND (rhs, 1);
897 if (use_delta ? (fld == delta_field) : (fld == ptr_field))
898 return rec;
899 else
900 return NULL_TREE;
901 }
902
903 /* If STMT looks like a statement loading a value from a member pointer formal
904 parameter, this function returns that parameter. */
905
906 static tree
907 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
908 {
909 tree rhs;
910
911 if (!gimple_assign_single_p (stmt))
912 return NULL_TREE;
913
914 rhs = gimple_assign_rhs1 (stmt);
915 return ipa_get_member_ptr_load_param (rhs, use_delta);
916 }
917
918 /* Returns true iff T is an SSA_NAME defined by a statement. */
919
920 static bool
921 ipa_is_ssa_with_stmt_def (tree t)
922 {
923 if (TREE_CODE (t) == SSA_NAME
924 && !SSA_NAME_IS_DEFAULT_DEF (t))
925 return true;
926 else
927 return false;
928 }
929
930 /* Find the indirect call graph edge corresponding to STMT and add to it all
931 information necessary to describe a call to a parameter number PARAM_INDEX.
932 NODE is the caller. POLYMORPHIC should be set to true iff the call is a
933 virtual one. */
934
935 static void
936 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
937 bool polymorphic)
938 {
939 struct cgraph_edge *cs;
940
941 cs = cgraph_edge (node, stmt);
942 cs->indirect_info->param_index = param_index;
943 cs->indirect_info->anc_offset = 0;
944 cs->indirect_info->polymorphic = polymorphic;
945 if (polymorphic)
946 {
947 tree otr = gimple_call_fn (stmt);
948 tree type, token = OBJ_TYPE_REF_TOKEN (otr);
949 cs->indirect_info->otr_token = tree_low_cst (token, 1);
950 type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
951 cs->indirect_info->otr_type = type;
952 }
953 }
954
955 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
956 (described by INFO). Currently it checks whether the call calls a pointer
957 that is a formal parameter and if so, the parameter is marked with the
958 called flag and an indirect call graph edge describing the call is created.
959 This is very simple for ordinary pointers represented in SSA but not-so-nice
960 when it comes to member pointers. The ugly part of this function does
961 nothing more than trying to match the pattern of such a call. An example of
962 such a pattern is the gimple dump below, the call is on the last line:
963
964 <bb 2>:
965 f$__delta_5 = f.__delta;
966 f$__pfn_24 = f.__pfn;
967 D.2496_3 = (int) f$__pfn_24;
968 D.2497_4 = D.2496_3 & 1;
969 if (D.2497_4 != 0)
970 goto <bb 3>;
971 else
972 goto <bb 4>;
973
974 <bb 3>:
975 D.2500_7 = (unsigned int) f$__delta_5;
976 D.2501_8 = &S + D.2500_7;
977 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
978 D.2503_10 = *D.2502_9;
979 D.2504_12 = f$__pfn_24 + -1;
980 D.2505_13 = (unsigned int) D.2504_12;
981 D.2506_14 = D.2503_10 + D.2505_13;
982 D.2507_15 = *D.2506_14;
983 iftmp.11_16 = (String:: *) D.2507_15;
984
985 <bb 4>:
986 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
987 D.2500_19 = (unsigned int) f$__delta_5;
988 D.2508_20 = &S + D.2500_19;
989 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
990
991 Such patterns are results of simple calls to a member pointer:
992
993 int doprinting (int (MyString::* f)(int) const)
994 {
995 MyString S ("somestring");
996
997 return (S.*f)(4);
998 }
999 */
1000
static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;

  /* The easy case: the call target is directly the incoming value of a
     formal parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call, false);
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */

  if (!POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* The target must be a two-argument PHI merging the virtual and
     non-virtual resolutions of the member pointer.  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_code (def) != GIMPLE_PHI)
    return;

  if (gimple_phi_num_args (def) != 2)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  /* Exactly one PHI argument may be a direct pfn load from the parameter;
     the block defining it is BB, the other (vtable lookup) block is
     VIRT_BB.  */
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false))
	return;

      bb = gimple_bb (d1);
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
    {
      bb = gimple_bb (d2);
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  join = gimple_bb (def);
  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (gimple_code (branch) != GIMPLE_COND)
    return;

  if (gimple_cond_code (branch) != NE_EXPR
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition operand must come from "pfn & 1".  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion between the load and the AND.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must load from the same member-pointer parameter; which
     field carries the virtualness bit is target-dependent.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta));

  if (rec != rec2)
    return;

  /* The pattern matched: record the indirect call if the parameter is not
     locally modified.  */
  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0 && !ipa_is_param_modified (info, index))
    ipa_note_param_call (node, index, call, false);

  return;
}
1120
1121 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1122 object referenced in the expression is a formal parameter of the caller
1123 (described by INFO), create a call note for the statement. */
1124
1125 static void
1126 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1127 struct ipa_node_params *info, gimple call,
1128 tree target)
1129 {
1130 tree obj = OBJ_TYPE_REF_OBJECT (target);
1131 tree var;
1132 int index;
1133
1134 if (TREE_CODE (obj) == ADDR_EXPR)
1135 {
1136 do
1137 {
1138 obj = TREE_OPERAND (obj, 0);
1139 }
1140 while (TREE_CODE (obj) == COMPONENT_REF);
1141 if (TREE_CODE (obj) != INDIRECT_REF)
1142 return;
1143 obj = TREE_OPERAND (obj, 0);
1144 }
1145
1146 if (TREE_CODE (obj) != SSA_NAME
1147 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1148 return;
1149
1150 var = SSA_NAME_VAR (obj);
1151 index = ipa_get_param_decl_index (info, var);
1152
1153 if (index >= 0)
1154 ipa_note_param_call (node, index, call, true);
1155 }
1156
1157 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1158 of the caller (described by INFO). */
1159
1160 static void
1161 ipa_analyze_call_uses (struct cgraph_node *node,
1162 struct ipa_node_params *info, gimple call)
1163 {
1164 tree target = gimple_call_fn (call);
1165
1166 if (TREE_CODE (target) == SSA_NAME)
1167 ipa_analyze_indirect_call_uses (node, info, call, target);
1168 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1169 ipa_analyze_virtual_call_uses (node, info, call, target);
1170 }
1171
1172
1173 /* Analyze the call statement STMT with respect to formal parameters (described
1174 in INFO) of caller given by NODE. Currently it only checks whether formal
1175 parameters are called. */
1176
1177 static void
1178 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1179 gimple stmt)
1180 {
1181 if (is_gimple_call (stmt))
1182 ipa_analyze_call_uses (node, info, stmt);
1183 }
1184
1185 /* Scan the function body of NODE and inspect the uses of formal parameters.
1186 Store the findings in various structures of the associated ipa_node_params
1187 structure, such as parameter flags, notes etc. */
1188
1189 void
1190 ipa_analyze_params_uses (struct cgraph_node *node)
1191 {
1192 tree decl = node->decl;
1193 basic_block bb;
1194 struct function *func;
1195 gimple_stmt_iterator gsi;
1196 struct ipa_node_params *info = IPA_NODE_REF (node);
1197
1198 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1199 return;
1200
1201 func = DECL_STRUCT_FUNCTION (decl);
1202 FOR_EACH_BB_FN (bb, func)
1203 {
1204 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1205 {
1206 gimple stmt = gsi_stmt (gsi);
1207 ipa_analyze_stmt_uses (node, info, stmt);
1208 }
1209 }
1210
1211 info->uses_analysis_done = 1;
1212 }
1213
1214 /* Update the jump function DST when the call graph edge correspondng to SRC is
1215 is being inlined, knowing that DST is of type ancestor and src of known
1216 type. */
1217
1218 static void
1219 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1220 struct ipa_jump_func *dst)
1221 {
1222 tree new_binfo;
1223
1224 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1225 dst->value.ancestor.offset,
1226 dst->value.ancestor.type);
1227 if (new_binfo)
1228 {
1229 dst->type = IPA_JF_KNOWN_TYPE;
1230 dst->value.base_binfo = new_binfo;
1231 }
1232 else
1233 dst->type = IPA_JF_UNKNOWN;
1234 }
1235
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.

   Each jump function of E that refers to a formal parameter of E->caller is
   re-expressed in terms of the jump functions of CS, or degraded to
   IPA_JF_UNKNOWN when that is not possible.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_CONST)
	    {
	      struct ipa_jump_func kt_func;

	      /* Try to derive a known-type jump function from the constant
		 and fold that into the ancestor jump function.  */
	      kt_func.type = IPA_JF_UNKNOWN;
	      compute_known_type_jump_func (src->value.constant, &kt_func);
	      if (kt_func.type == IPA_JF_KNOWN_TYPE)
		combine_known_type_and_ancestor_jfs (&kt_func, dst);
	      else
		dst->type = IPA_JF_UNKNOWN;
	    }
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    /* A simple pass-through: only redirect the formal id.  */
	    dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* Compose two ancestor walks into one.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      src = ipa_get_ith_jump_func (top,
					   dst->value.pass_through.formal_id);
	      *dst = *src;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
1309
1310 /* If TARGET is an addr_expr of a function declaration, make it the destination
1311 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1312
1313 static struct cgraph_edge *
1314 make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
1315 {
1316 struct cgraph_node *callee;
1317
1318 if (TREE_CODE (target) != ADDR_EXPR)
1319 return NULL;
1320 target = TREE_OPERAND (target, 0);
1321 if (TREE_CODE (target) != FUNCTION_DECL)
1322 return NULL;
1323 callee = cgraph_node (target);
1324 if (!callee)
1325 return NULL;
1326
1327 cgraph_make_edge_direct (ie, callee);
1328 if (dump_file)
1329 {
1330 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1331 "(%s/%i -> %s/%i) for stmt ",
1332 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1333 cgraph_node_name (ie->caller), ie->caller->uid,
1334 cgraph_node_name (ie->callee), ie->callee->uid);
1335
1336 if (ie->call_stmt)
1337 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1338 else
1339 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1340 }
1341 return ie;
1342 }
1343
1344 /* Try to find a destination for indirect edge IE that corresponds to a simple
1345 call or a call of a member function pointer and where the destination is a
1346 pointer formal parameter described by jump function JFUNC. If it can be
1347 determined, return the newly direct edge, otherwise return NULL. */
1348
1349 static struct cgraph_edge *
1350 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1351 struct ipa_jump_func *jfunc)
1352 {
1353 tree target;
1354
1355 if (jfunc->type == IPA_JF_CONST)
1356 target = jfunc->value.constant;
1357 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1358 target = jfunc->value.member_cst.pfn;
1359 else
1360 return NULL;
1361
1362 return make_edge_direct_to_target (ie, target);
1363 }
1364
1365 /* Try to find a destination for indirect edge IE that corresponds to a
1366 virtuall call based on a formal parameter which is described by jump
1367 function JFUNC and if it can be determined, make it direct and return the
1368 direct edge. Otherwise, return NULL. */
1369
1370 static struct cgraph_edge *
1371 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1372 struct ipa_jump_func *jfunc)
1373 {
1374 tree binfo, type, target;
1375 HOST_WIDE_INT token;
1376
1377 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1378 binfo = jfunc->value.base_binfo;
1379 else if (jfunc->type == IPA_JF_CONST)
1380 {
1381 tree cst = jfunc->value.constant;
1382 if (TREE_CODE (cst) == ADDR_EXPR)
1383 binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (cst, 0),
1384 NULL_TREE);
1385 else
1386 return NULL;
1387 }
1388 else
1389 return NULL;
1390
1391 if (!binfo)
1392 return NULL;
1393
1394 token = ie->indirect_info->otr_token;
1395 type = ie->indirect_info->otr_type;
1396 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1397 if (binfo)
1398 target = gimple_fold_obj_type_ref_known_binfo (token, binfo);
1399 else
1400 return NULL;
1401
1402 if (target)
1403 return make_edge_direct_to_target (ie, target);
1404 else
1405 return NULL;
1406 }
1407
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      VEC (cgraph_edge_p, heap) **new_edges)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  bool res = false;

  ipa_check_create_edge_args ();

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;

      /* Remember the successor now; turning IE into a direct edge unlinks it
	 from the indirect call list.  */
      next_ie = ie->next_callee;
      if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
	continue;

      /* If we ever use indirect edges for anything other than indirect
	 inlining, we will need to skip those with negative param_indices. */
      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  bitmap_set_bit (iinlining_processed_edges, ie->uid);
	  continue;
	}

      /* Re-express the parameter index of the indirect call in terms of the
	 caller of CS, following pass-through and ancestor jump functions.  */
      jfunc = ipa_get_ith_jump_func (top, ici->param_index);
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && jfunc->value.pass_through.operation == NOP_EXPR)
	ici->param_index = jfunc->value.pass_through.formal_id;
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  ici->param_index = jfunc->value.ancestor.formal_id;
	  ici->anc_offset += jfunc->value.ancestor.offset;
	}
      else
	/* Either we can find a destination for this edge now or never. */
	bitmap_set_bit (iinlining_processed_edges, ie->uid);

      if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);

      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      VEC_safe_push (cgraph_edge_p, heap, *new_edges,
			     new_direct_edge);
	      /* NOTE(review): TOP is re-fetched here, presumably because the
		 operations above may have resized the underlying edge-args
		 vector and invalidated the old pointer -- confirm before
		 relying on TOP across this point.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
    }

  return res;
}
1479
1480 /* Recursively traverse subtree of NODE (including node) made of inlined
1481 cgraph_edges when CS has been inlined and invoke
1482 update_indirect_edges_after_inlining on all nodes and
1483 update_jump_functions_after_inlining on all non-inlined edges that lead out
1484 of this subtree. Newly discovered indirect edges will be added to
1485 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1486 created. */
1487
1488 static bool
1489 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1490 struct cgraph_node *node,
1491 VEC (cgraph_edge_p, heap) **new_edges)
1492 {
1493 struct cgraph_edge *e;
1494 bool res;
1495
1496 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1497
1498 for (e = node->callees; e; e = e->next_callee)
1499 if (!e->inline_failed)
1500 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1501 else
1502 update_jump_functions_after_inlining (cs, e);
1503
1504 return res;
1505 }
1506
1507 /* Update jump functions and call note functions on inlining the call site CS.
1508 CS is expected to lead to a node already cloned by
1509 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1510 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
1511 created. */
1512
1513 bool
1514 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1515 VEC (cgraph_edge_p, heap) **new_edges)
1516 {
1517 /* FIXME lto: We do not stream out indirect call information. */
1518 if (flag_wpa)
1519 return false;
1520
1521 /* Do nothing if the preparation phase has not been carried out yet
1522 (i.e. during early inlining). */
1523 if (!ipa_node_params_vector)
1524 return false;
1525 gcc_assert (ipa_edge_args_vector);
1526
1527 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1528 }
1529
1530 /* Frees all dynamically allocated structures that the argument info points
1531 to. */
1532
1533 void
1534 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1535 {
1536 if (args->jump_functions)
1537 ggc_free (args->jump_functions);
1538
1539 memset (args, 0, sizeof (*args));
1540 }
1541
1542 /* Free all ipa_edge structures. */
1543
1544 void
1545 ipa_free_all_edge_args (void)
1546 {
1547 int i;
1548 struct ipa_edge_args *args;
1549
1550 for (i = 0;
1551 VEC_iterate (ipa_edge_args_t, ipa_edge_args_vector, i, args);
1552 i++)
1553 ipa_free_edge_args_substructures (args);
1554
1555 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1556 ipa_edge_args_vector = NULL;
1557 }
1558
1559 /* Frees all dynamically allocated structures that the param info points
1560 to. */
1561
1562 void
1563 ipa_free_node_params_substructures (struct ipa_node_params *info)
1564 {
1565 if (info->params)
1566 free (info->params);
1567
1568 memset (info, 0, sizeof (*info));
1569 }
1570
1571 /* Free all ipa_node_params structures. */
1572
1573 void
1574 ipa_free_all_node_params (void)
1575 {
1576 int i;
1577 struct ipa_node_params *info;
1578
1579 for (i = 0;
1580 VEC_iterate (ipa_node_params_t, ipa_node_params_vector, i, info);
1581 i++)
1582 ipa_free_node_params_substructures (info);
1583
1584 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1585 ipa_node_params_vector = NULL;
1586 }
1587
1588 /* Hook that is called by cgraph.c when an edge is removed. */
1589
1590 static void
1591 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1592 {
1593 /* During IPA-CP updating we can be called on not-yet analyze clones. */
1594 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1595 <= (unsigned)cs->uid)
1596 return;
1597 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1598 }
1599
1600 /* Hook that is called by cgraph.c when a node is removed. */
1601
1602 static void
1603 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1604 {
1605 /* During IPA-CP updating we can be called on not-yet analyze clones. */
1606 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1607 <= (unsigned)node->uid)
1608 return;
1609 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1610 }
1611
/* Helper function returning a pointer to a freshly xmalloc'd copy of the N
   bytes at SRC, or NULL if SRC is NULL.  */

static void *
duplicate_array (void *src, size_t n)
{
  void *copy;

  if (src == NULL)
    return NULL;

  copy = xmalloc (n);
  return memcpy (copy, src, n);
}
1627
/* Like duplicate_array, but the copy is allocated in GGC memory.  */

static void *
duplicate_ggc_array (void *src, size_t n)
{
  void *copy;

  if (src == NULL)
    return NULL;

  copy = ggc_alloc (n);
  return memcpy (copy, src, n);
}
1642
/* Hook that is called by cgraph.c when an edge is duplicated.  Copies the
   jump functions of SRC to the new edge DST and carries over the
   indirect-inlining-processed mark, if any.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  int arg_count;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  arg_count = ipa_get_cs_argument_count (old_args);
  ipa_set_cs_argument_count (new_args, arg_count);
  new_args->jump_functions = (struct ipa_jump_func *)
    duplicate_ggc_array (old_args->jump_functions,
			 sizeof (struct ipa_jump_func) * arg_count);

  /* The bitmap only exists while indirect inlining data is live.  */
  if (iinlining_processed_edges
      && bitmap_bit_p (iinlining_processed_edges, src->uid))
    bitmap_set_bit (iinlining_processed_edges, dst->uid);
}
1667
1668 /* Hook that is called by cgraph.c when a node is duplicated. */
1669
1670 static void
1671 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1672 __attribute__((unused)) void *data)
1673 {
1674 struct ipa_node_params *old_info, *new_info;
1675 int param_count;
1676
1677 ipa_check_create_node_params ();
1678 old_info = IPA_NODE_REF (src);
1679 new_info = IPA_NODE_REF (dst);
1680 param_count = ipa_get_param_count (old_info);
1681
1682 ipa_set_param_count (new_info, param_count);
1683 new_info->params = (struct ipa_param_descriptor *)
1684 duplicate_array (old_info->params,
1685 sizeof (struct ipa_param_descriptor) * param_count);
1686 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
1687 new_info->count_scale = old_info->count_scale;
1688 }
1689
1690 /* Register our cgraph hooks if they are not already there. */
1691
1692 void
1693 ipa_register_cgraph_hooks (void)
1694 {
1695 if (!edge_removal_hook_holder)
1696 edge_removal_hook_holder =
1697 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1698 if (!node_removal_hook_holder)
1699 node_removal_hook_holder =
1700 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1701 if (!edge_duplication_hook_holder)
1702 edge_duplication_hook_holder =
1703 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
1704 if (!node_duplication_hook_holder)
1705 node_duplication_hook_holder =
1706 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
1707 }
1708
1709 /* Unregister our cgraph hooks if they are not already there. */
1710
1711 static void
1712 ipa_unregister_cgraph_hooks (void)
1713 {
1714 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
1715 edge_removal_hook_holder = NULL;
1716 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1717 node_removal_hook_holder = NULL;
1718 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
1719 edge_duplication_hook_holder = NULL;
1720 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1721 node_duplication_hook_holder = NULL;
1722 }
1723
/* Allocate all data structures necessary for indirect inlining.  Currently
   this is just the bitmap of already-processed indirect edges.  */

void
ipa_create_all_structures_for_iinln (void)
{
  iinlining_processed_edges = BITMAP_ALLOC (NULL);
}
1731
1732 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1733 longer needed after ipa-cp. */
1734
1735 void
1736 ipa_free_all_structures_after_ipa_cp (void)
1737 {
1738 if (!flag_indirect_inlining)
1739 {
1740 ipa_free_all_edge_args ();
1741 ipa_free_all_node_params ();
1742 ipa_unregister_cgraph_hooks ();
1743 }
1744 }
1745
1746 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1747 longer needed after indirect inlining. */
1748
1749 void
1750 ipa_free_all_structures_after_iinln (void)
1751 {
1752 BITMAP_FREE (iinlining_processed_edges);
1753
1754 ipa_free_all_edge_args ();
1755 ipa_free_all_node_params ();
1756 ipa_unregister_cgraph_hooks ();
1757 }
1758
/* Print the parameter descriptors of the single function NODE to file F.
   Does nothing for nodes that have not been analyzed.  */

void
ipa_print_node_params (FILE * f, struct cgraph_node *node)
{
  int i, count;
  tree temp;
  struct ipa_node_params *info;

  if (!node->analyzed)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n",
	   cgraph_node_name (node));
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      temp = ipa_get_param (info, i);
      if (TREE_CODE (temp) == PARM_DECL)
	fprintf (f, "    param %d : %s", i,
		 (DECL_NAME (temp)
		  ? (*lang_hooks.decl_printable_name) (temp, 2)
		  : "(unnamed)"));
      /* Flags computed by the parameter-use analysis.  */
      if (ipa_is_param_modified (info, i))
	fprintf (f, " modified");
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      fprintf (f, "\n");
    }
}
1790
1791 /* Print ipa_tree_map data structures of all functions in the
1792 callgraph to F. */
1793
1794 void
1795 ipa_print_all_params (FILE * f)
1796 {
1797 struct cgraph_node *node;
1798
1799 fprintf (f, "\nFunction parameters:\n");
1800 for (node = cgraph_nodes; node; node = node->next)
1801 ipa_print_node_params (f, node);
1802 }
1803
1804 /* Return a heap allocated vector containing formal parameters of FNDECL. */
1805
1806 VEC(tree, heap) *
1807 ipa_get_vector_of_formal_parms (tree fndecl)
1808 {
1809 VEC(tree, heap) *args;
1810 int count;
1811 tree parm;
1812
1813 count = count_formal_params_1 (fndecl);
1814 args = VEC_alloc (tree, heap, count);
1815 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = TREE_CHAIN (parm))
1816 VEC_quick_push (tree, args, parm);
1817
1818 return args;
1819 }
1820
1821 /* Return a heap allocated vector containing types of formal parameters of
1822 function type FNTYPE. */
1823
1824 static inline VEC(tree, heap) *
1825 get_vector_of_formal_parm_types (tree fntype)
1826 {
1827 VEC(tree, heap) *types;
1828 int count = 0;
1829 tree t;
1830
1831 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1832 count++;
1833
1834 types = VEC_alloc (tree, heap, count);
1835 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1836 VEC_quick_push (tree, types, TREE_VALUE (t));
1837
1838 return types;
1839 }
1840
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  SYNTH_PARM_PREFIX, if non-NULL, is used to name
   synthesized replacement parameters.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  VEC(tree, heap) *oparms, *otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
      else
	gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
    }
  else
    {
      last_parm_void = false;
      otypes = NULL;
    }

  /* Build the new DECL_ARGUMENTS chain and, in parallel, the new argument
     type list (accumulated in reverse).  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      parm = VEC_index (tree, oparms, adj->base_index);
      adj->base = parm;

      if (adj->copy_param)
	{
	  /* The original parameter is kept unchanged.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
							     adj->base_index),
				       new_arg_types);
	  *link = parm;
	  link = &TREE_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  /* Synthesize a replacement parameter of adj->type, passed by
	     reference if adj->by_ref is set.  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  add_referenced_var (new_parm);
	  mark_sym_for_renaming (new_parm);
	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &TREE_CHAIN (new_parm);
	}
      /* Adjustments with remove_param set contribute nothing here.  */
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      /* Restore the order of the type list and re-attach the trailing void
	 marker if the original argument list ended with one.  */
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
       || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
	 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
    {
      new_type = copy_node (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  if (otypes)
    VEC_free (tree, heap, otypes);
  VEC_free (tree, heap, oparms);
}
1992
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  The original statement STMT is
   replaced with a newly built call; SSA form and dominance info are
   invalidated accordingly.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  VEC(tree, heap) *vargs;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  tree callee_decl;
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  vargs = VEC_alloc (tree, heap, len);

  gsi = gsi_for_stmt (stmt);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->copy_param)
	{
	  /* The argument is passed on unchanged.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  VEC_quick_push (tree, vargs, arg);
	}
      else if (!adj->remove_param)
	{
	  /* Build an expression for the component of the original argument
	     located at adj->offset bits, of type adj->type.  */
	  tree expr, orig_expr;
	  bool allow_ptr, repl_found;

	  orig_expr = expr = gimple_call_arg (stmt, adj->base_index);
	  if (TREE_CODE (expr) == ADDR_EXPR)
	    {
	      allow_ptr = false;
	      expr = TREE_OPERAND (expr, 0);
	    }
	  else
	    allow_ptr = true;

	  repl_found = build_ref_for_offset (&expr, TREE_TYPE (expr),
					     adj->offset, adj->type,
					     allow_ptr);
	  if (repl_found)
	    {
	      if (adj->by_ref)
		expr = build_fold_addr_expr (expr);
	    }
	  else
	    {
	      /* No suitable component reference could be built; fall back to
		 explicit pointer arithmetic on the original argument.  */
	      tree ptrtype = build_pointer_type (adj->type);
	      expr = orig_expr;
	      if (!POINTER_TYPE_P (TREE_TYPE (expr)))
		expr = build_fold_addr_expr (expr);
	      if (!useless_type_conversion_p (ptrtype, TREE_TYPE (expr)))
		expr = fold_convert (ptrtype, expr);
	      expr = fold_build2 (POINTER_PLUS_EXPR, ptrtype, expr,
				  build_int_cst (sizetype,
						 adj->offset / BITS_PER_UNIT));
	      if (!adj->by_ref)
		expr = fold_build1 (INDIRECT_REF, adj->type, expr);
	    }
	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  VEC_quick_push (tree, vargs, expr);
	}
      /* Arguments of removed parameters are simply dropped.  */
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  /* Build the replacement call, preserving lhs, block, location, flags and
     the static chain of the original statement.  */
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
2097
2098 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2099
2100 static bool
2101 index_in_adjustments_multiple_times_p (int base_index,
2102 ipa_parm_adjustment_vec adjustments)
2103 {
2104 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2105 bool one = false;
2106
2107 for (i = 0; i < len; i++)
2108 {
2109 struct ipa_parm_adjustment *adj;
2110 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2111
2112 if (adj->base_index == base_index)
2113 {
2114 if (one)
2115 return true;
2116 else
2117 one = true;
2118 }
2119 }
2120 return false;
2121 }
2122
2123
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
  int inlen = VEC_length (ipa_parm_adjustment_t, inner);
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP holds the inner adjustments that survive (are not removals);
     OUTER's base_index values refer to positions in this filtered
     sequence.  */
  tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = VEC_index (ipa_parm_adjustment_t, inner, i);

      if (n->remove_param)
	removals++;
      else
	VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
    }

  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment *r;
      struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
						   outer, i);
      struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
						  out->base_index);

      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* An outer removal becomes a removal of the inner base parameter,
	     but only if no other surviving inner adjustment still refers to
	     it.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (r, 0, sizeof (*r));
	      r->remove_param = true;
	    }
	  continue;
	}

  	  r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
      memset (r, 0, sizeof (*r));
      r->base_index = in->base_index;
      r->type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      /* Offsets compose additively; a copy contributes offset zero.  */
      if (in->copy_param && out->copy_param)
	r->copy_param = true;
      else if (in->copy_param)
	r->offset = out->offset;
      else if (out->copy_param)
	r->offset = in->offset;
      else
	r->offset = in->offset + out->offset;
    }

  /* Inner removals are carried over to the combination unchanged.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
						 inner, i);

      if (n->remove_param)
	VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
    }

  VEC_free (ipa_parm_adjustment_t, heap, tmp);
  return adjustments;
}
2199
2200 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2201 friendly way, assuming they are meant to be applied to FNDECL. */
2202
2203 void
2204 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2205 tree fndecl)
2206 {
2207 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2208 bool first = true;
2209 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2210
2211 fprintf (file, "IPA param adjustments: ");
2212 for (i = 0; i < len; i++)
2213 {
2214 struct ipa_parm_adjustment *adj;
2215 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2216
2217 if (!first)
2218 fprintf (file, " ");
2219 else
2220 first = false;
2221
2222 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2223 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2224 if (adj->base)
2225 {
2226 fprintf (file, ", base: ");
2227 print_generic_expr (file, adj->base, 0);
2228 }
2229 if (adj->reduction)
2230 {
2231 fprintf (file, ", reduction: ");
2232 print_generic_expr (file, adj->reduction, 0);
2233 }
2234 if (adj->new_ssa_base)
2235 {
2236 fprintf (file, ", new_ssa_base: ");
2237 print_generic_expr (file, adj->new_ssa_base, 0);
2238 }
2239
2240 if (adj->copy_param)
2241 fprintf (file, ", copy_param");
2242 else if (adj->remove_param)
2243 fprintf (file, ", remove_param");
2244 else
2245 fprintf (file, ", offset %li", (long) adj->offset);
2246 if (adj->by_ref)
2247 fprintf (file, ", by_ref");
2248 print_node_brief (file, ", type: ", adj->type, 0);
2249 fprintf (file, "\n");
2250 }
2251 VEC_free (tree, heap, parms);
2252 }
2253
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  /* The type tag goes first so the reader knows which union member and how
     many records follow.  */
  lto_output_uleb128_stream (ob->main_stream,
			     jump_func->type);

  /* NOTE: the per-case field order below must stay in sync with
     ipa_read_jump_function.  */
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      lto_output_tree (ob, jump_func->value.base_binfo, true);
      break;
    case IPA_JF_CONST:
      lto_output_tree (ob, jump_func->value.constant, true);
      break;
    case IPA_JF_PASS_THROUGH:
      lto_output_tree (ob, jump_func->value.pass_through.operand, true);
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.pass_through.formal_id);
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.pass_through.operation);
      break;
    case IPA_JF_ANCESTOR:
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.ancestor.offset);
      lto_output_tree (ob, jump_func->value.ancestor.type, true);
      lto_output_uleb128_stream (ob->main_stream,
				 jump_func->value.ancestor.formal_id);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
      lto_output_tree (ob, jump_func->value.member_cst.delta, false);
      break;
    }
}
2293
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct data_in *data_in)
{
  /* The type tag determines which union member and how many records to
     read; field order in each case mirrors ipa_write_jump_function.  */
  jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      jump_func->value.base_binfo = lto_input_tree (ib, data_in);
      break;
    case IPA_JF_CONST:
      jump_func->value.constant = lto_input_tree (ib, data_in);
      break;
    case IPA_JF_PASS_THROUGH:
      jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
      jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
      jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
      break;
    case IPA_JF_ANCESTOR:
      jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
      jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
      jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
      jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
      break;
    }
}
2329
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d *bp;

  /* Field order must mirror ipa_read_indirect_edge_info.  */
  lto_output_sleb128_stream (ob->main_stream, ii->param_index);
  lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
  bp = bitpack_create ();
  bp_pack_value (bp, ii->polymorphic, 1);
  lto_output_bitpack (ob->main_stream, bp);
  bitpack_delete (bp);

  /* The token and type are only streamed for polymorphic calls; the reader
     tests the same flag before consuming them.  */
  if (ii->polymorphic)
    {
      lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
      lto_output_tree (ob, ii->otr_type, true);
    }
}
2353
2354 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2355 relevant to indirect inlining from IB. */
2356
2357 static void
2358 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2359 struct data_in *data_in ATTRIBUTE_UNUSED,
2360 struct cgraph_edge *cs)
2361 {
2362 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2363 struct bitpack_d *bp;
2364
2365 ii->param_index = (int) lto_input_sleb128 (ib);
2366 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2367 bp = lto_input_bitpack (ib);
2368 ii->polymorphic = bp_unpack_value (bp, 1);
2369 bitpack_delete (bp);
2370 if (ii->polymorphic)
2371 {
2372 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2373 ii->otr_type = lto_input_tree (ib, data_in);
2374 }
2375 }
2376
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_cgraph_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d *bp;

  /* Identify the node by its index in the cgraph encoder so the reader can
     map the record back to the right node.  */
  encoder = ob->decl_state->cgraph_node_encoder;
  node_ref = lto_cgraph_encoder_encode (encoder, node);
  lto_output_uleb128_stream (ob->main_stream, node_ref);

  /* Pack all the boolean flags (two node-level flags followed by two bits
     per parameter) into a single bitpack; bit order must mirror
     ipa_read_node_info.  */
  bp = bitpack_create ();
  bp_pack_value (bp, info->called_with_var_arguments, 1);
  bp_pack_value (bp, info->uses_analysis_done, 1);
  /* Streaming is only valid once the analyses have run and before any IPCP
     cloning or propagation-queue state exists.  */
  gcc_assert (info->modification_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      bp_pack_value (bp, info->params[j].modified, 1);
      bp_pack_value (bp, info->params[j].used, 1);
    }
  lto_output_bitpack (ob->main_stream, bp);
  bitpack_delete (bp);
  /* For each direct callee edge, stream the argument count followed by the
     jump function of every argument.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      lto_output_uleb128_stream (ob->main_stream,
				 ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  /* Indirect edges carry their indirect-call info instead.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    ipa_write_indirect_edge_info (ob, e);
}
2419
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d *bp;

  /* Allocate the parameter descriptors from NODE's decl so the bit counts
     below line up with what the writer streamed.  */
  ipa_initialize_node_params (node);

  /* Unpack the flags in the same order ipa_write_node_info packed them:
     two node-level flags, then two bits per parameter.  */
  bp = lto_input_bitpack (ib);
  info->called_with_var_arguments = bp_unpack_value (bp, 1);
  info->uses_analysis_done = bp_unpack_value (bp, 1);
  /* The writer asserted the analyses were done whenever there are
     parameters, so mark them done on this side too.  */
  if (ipa_get_param_count (info) != 0)
    {
      info->modification_analysis_done = true;
      info->uses_analysis_done = true;
    }
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    {
      info->params[k].modified = bp_unpack_value (bp, 1);
      info->params[k].used = bp_unpack_value (bp, 1);
    }
  bitpack_delete (bp);
  /* For each direct callee edge, read the argument count and then one jump
     function per argument.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = lto_input_uleb128 (ib);

      ipa_set_cs_argument_count (args, count);
      if (!count)
	continue;

      args->jump_functions = GGC_CNEWVEC (struct ipa_jump_func,
					  ipa_get_cs_argument_count (args));
      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
    }
  /* Indirect edges carry their indirect-call info instead.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    ipa_read_indirect_edge_info (ib, data_in, e);
}
2465
2466 /* Write jump functions for nodes in SET. */
2467
2468 void
2469 ipa_prop_write_jump_functions (cgraph_node_set set)
2470 {
2471 struct cgraph_node *node;
2472 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2473 unsigned int count = 0;
2474 cgraph_node_set_iterator csi;
2475
2476 ob->cgraph_node = NULL;
2477
2478 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2479 {
2480 node = csi_node (csi);
2481 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2482 count++;
2483 }
2484
2485 lto_output_uleb128_stream (ob->main_stream, count);
2486
2487 /* Process all of the functions. */
2488 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2489 {
2490 node = csi_node (csi);
2491 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2492 ipa_write_node_info (ob, node);
2493 }
2494 lto_output_1_stream (ob->main_stream, 0);
2495 produce_asm (ob, NULL);
2496 destroy_output_block (ob);
2497 }
2498
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* The section layout is: header, CFG area, main stream, string table.
     Compute the offset of each region from the sizes in the header.  */
  const int32_t cfg_offset = sizeof (struct lto_function_header);
  const int32_t main_offset = cfg_offset + header->cfg_size;
  const int32_t string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  /* The writer emitted the node count first (see
     ipa_prop_write_jump_functions).  */
  count = lto_input_uleb128 (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_cgraph_encoder_t encoder;

      /* Each record starts with the node's index in the cgraph encoder.  */
      index = lto_input_uleb128 (&ib_main);
      encoder = file_data->cgraph_node_encoder;
      node = lto_cgraph_encoder_deref (encoder, index);
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
2539
2540 /* Read ipcp jump functions. */
2541
2542 void
2543 ipa_prop_read_jump_functions (void)
2544 {
2545 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2546 struct lto_file_decl_data *file_data;
2547 unsigned int j = 0;
2548
2549 ipa_check_create_node_params ();
2550 ipa_check_create_edge_args ();
2551 ipa_register_cgraph_hooks ();
2552
2553 while ((file_data = file_data_vec[j++]))
2554 {
2555 size_t len;
2556 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2557
2558 if (data)
2559 ipa_prop_read_section (file_data, data, len);
2560 }
2561 }
2562
2563 /* After merging units, we can get mismatch in argument counts.
2564 Also decl merging might've rendered parameter lists obsolette.
2565 Also compute called_with_variable_arg info. */
2566
2567 void
2568 ipa_update_after_lto_read (void)
2569 {
2570 struct cgraph_node *node;
2571 struct cgraph_edge *cs;
2572
2573 ipa_check_create_node_params ();
2574 ipa_check_create_edge_args ();
2575
2576 for (node = cgraph_nodes; node; node = node->next)
2577 if (node->analyzed)
2578 ipa_initialize_node_params (node);
2579
2580 for (node = cgraph_nodes; node; node = node->next)
2581 if (node->analyzed)
2582 for (cs = node->callees; cs; cs = cs->next_callee)
2583 {
2584 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2585 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2586 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
2587 }
2588 }