5e85325048fdb9eea4a9114a84f6dafb90a3c84c
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41 #include "data-streamer.h"
42 #include "tree-streamer.h"
43
44
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  /* True once the parameter is known to be modified before the analyzed
     call (cached positive result of is_parm_modified_before_call).  */
  bool modified;
  /* Statements already visited by walk_aliased_vdefs so that repeated
     queries do not re-scan them.  */
  bitmap visited_statements;
};
53
/* Vector where the parameter infos are actually stored.  */
VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the edge argument (jump function) infos are actually
   stored.  */
VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
65
66 /* Return index of the formal whose tree is PTREE in function which corresponds
67 to INFO. */
68
69 int
70 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
71 {
72 int i, count;
73
74 count = ipa_get_param_count (info);
75 for (i = 0; i < count; i++)
76 if (ipa_get_param (info, i) == ptree)
77 return i;
78
79 return -1;
80 }
81
82 /* Populate the param_decl field in parameter descriptors of INFO that
83 corresponds to NODE. */
84
85 static void
86 ipa_populate_param_decls (struct cgraph_node *node,
87 struct ipa_node_params *info)
88 {
89 tree fndecl;
90 tree fnargs;
91 tree parm;
92 int param_num;
93
94 fndecl = node->decl;
95 fnargs = DECL_ARGUMENTS (fndecl);
96 param_num = 0;
97 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
98 {
99 VEC_index (ipa_param_descriptor_t,
100 info->descriptors, param_num)->decl = parm;
101 param_num++;
102 }
103 }
104
105 /* Return how many formal parameters FNDECL has. */
106
107 static inline int
108 count_formal_params (tree fndecl)
109 {
110 tree parm;
111 int count = 0;
112
113 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
114 count++;
115
116 return count;
117 }
118
119 /* Initialize the ipa_node_params structure associated with NODE by counting
120 the function parameters, creating the descriptors and populating their
121 param_decls. */
122
123 void
124 ipa_initialize_node_params (struct cgraph_node *node)
125 {
126 struct ipa_node_params *info = IPA_NODE_REF (node);
127
128 if (!info->descriptors)
129 {
130 int param_count;
131
132 param_count = count_formal_params (node->decl);
133 if (param_count)
134 {
135 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
136 info->descriptors, param_count);
137 ipa_populate_param_decls (node, info);
138 }
139 }
140 }
141
142 /* Print the jump functions associated with call graph edge CS to file F. */
143
144 static void
145 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
146 {
147 int i, count;
148
149 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
150 for (i = 0; i < count; i++)
151 {
152 struct ipa_jump_func *jump_func;
153 enum jump_func_type type;
154
155 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
156 type = jump_func->type;
157
158 fprintf (f, " param %d: ", i);
159 if (type == IPA_JF_UNKNOWN)
160 fprintf (f, "UNKNOWN\n");
161 else if (type == IPA_JF_KNOWN_TYPE)
162 {
163 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
164 fprintf (f, "KNOWN TYPE, type in binfo is: ");
165 print_generic_expr (f, binfo_type, 0);
166 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
167 }
168 else if (type == IPA_JF_CONST)
169 {
170 tree val = jump_func->value.constant;
171 fprintf (f, "CONST: ");
172 print_generic_expr (f, val, 0);
173 if (TREE_CODE (val) == ADDR_EXPR
174 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
175 {
176 fprintf (f, " -> ");
177 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
178 0);
179 }
180 fprintf (f, "\n");
181 }
182 else if (type == IPA_JF_CONST_MEMBER_PTR)
183 {
184 fprintf (f, "CONST MEMBER PTR: ");
185 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
186 fprintf (f, ", ");
187 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
188 fprintf (f, "\n");
189 }
190 else if (type == IPA_JF_PASS_THROUGH)
191 {
192 fprintf (f, "PASS THROUGH: ");
193 fprintf (f, "%d, op %s ",
194 jump_func->value.pass_through.formal_id,
195 tree_code_name[(int)
196 jump_func->value.pass_through.operation]);
197 if (jump_func->value.pass_through.operation != NOP_EXPR)
198 print_generic_expr (f,
199 jump_func->value.pass_through.operand, 0);
200 fprintf (f, "\n");
201 }
202 else if (type == IPA_JF_ANCESTOR)
203 {
204 fprintf (f, "ANCESTOR: ");
205 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
206 jump_func->value.ancestor.formal_id,
207 jump_func->value.ancestor.offset);
208 print_generic_expr (f, jump_func->value.ancestor.type, 0);
209 fprintf (f, "\n");
210 }
211 }
212 }
213
214
215 /* Print the jump functions of all arguments on all call graph edges going from
216 NODE to file F. */
217
218 void
219 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
220 {
221 struct cgraph_edge *cs;
222 int i;
223
224 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
225 for (cs = node->callees; cs; cs = cs->next_callee)
226 {
227 if (!ipa_edge_args_info_available_for_edge_p (cs))
228 continue;
229
230 fprintf (f, " callsite %s/%i -> %s/%i : \n",
231 cgraph_node_name (node), node->uid,
232 cgraph_node_name (cs->callee), cs->callee->uid);
233 ipa_print_node_jump_functions_for_edge (f, cs);
234 }
235
236 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
237 {
238 if (!ipa_edge_args_info_available_for_edge_p (cs))
239 continue;
240
241 if (cs->call_stmt)
242 {
243 fprintf (f, " indirect callsite %d for stmt ", i);
244 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
245 }
246 else
247 fprintf (f, " indirect callsite %d :\n", i);
248 ipa_print_node_jump_functions_for_edge (f, cs);
249
250 }
251 }
252
253 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
254
255 void
256 ipa_print_all_jump_functions (FILE *f)
257 {
258 struct cgraph_node *node;
259
260 fprintf (f, "\nJump functions:\n");
261 for (node = cgraph_nodes; node; node = node->next)
262 {
263 ipa_print_node_jump_functions (f, node);
264 }
265 }
266
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Set to true if dynamic type change has been detected.  Written by
     check_stmt_for_type_change, read back by detect_type_change.  */
  bool type_maybe_changed;
};
275
276 /* Return true if STMT can modify a virtual method table pointer.
277
278 This function makes special assumptions about both constructors and
279 destructors which are all the functions that are allowed to alter the VMT
280 pointers. It assumes that destructors begin with assignment into all VMT
281 pointers and that constructors essentially look in the following way:
282
283 1) The very first thing they do is that they call constructors of ancestor
284 sub-objects that have them.
285
286 2) Then VMT pointers of this and all its ancestors is set to new values
287 corresponding to the type corresponding to the constructor.
288
289 3) Only afterwards, other stuff such as constructor of member sub-objects
290 and the code written by the user is run. Only this may include calling
291 virtual functions, directly or indirectly.
292
293 There is no way to call a constructor of an ancestor sub-object in any
294 other way.
295
296 This means that we do not have to care whether constructors get the correct
297 type information because they will always change it (in fact, if we define
298 the type to be given by the VMT pointer, it is undefined).
299
300 The most important fact to derive from the above is that if, for some
301 statement in the section 3, we try to detect whether the dynamic type has
302 changed, we can safely ignore all calls as we examine the function body
303 backwards until we reach statements in section 2 because these calls cannot
304 be ancestor constructors or destructors (if the input is not bogus) and so
305 do not change the dynamic type (this holds true only for automatically
306 allocated objects but at the moment we devirtualize only these). We then
307 must detect that statements in section 2 change the dynamic type and can try
308 to derive the new type. That is enough and we can stop, we will never see
309 the calls into constructors of sub-objects in this code. Therefore we can
310 safely ignore all call statements that we traverse.
311 */
312
313 static bool
314 stmt_may_be_vtbl_ptr_store (gimple stmt)
315 {
316 if (is_gimple_call (stmt))
317 return false;
318 else if (is_gimple_assign (stmt))
319 {
320 tree lhs = gimple_assign_lhs (stmt);
321
322 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
323 {
324 if (flag_strict_aliasing
325 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
326 return false;
327
328 if (TREE_CODE (lhs) == COMPONENT_REF
329 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
330 return false;
331 /* In the future we might want to use get_base_ref_and_offset to find
332 if there is a field corresponding to the offset and if so, proceed
333 almost like if it was a component ref. */
334 }
335 }
336 return true;
337 }
338
339 /* Callback of walk_aliased_vdefs and a helper function for
340 detect_type_change to check whether a particular statement may modify
341 the virtual table pointer, and if possible also determine the new type of
342 the (sub-)object. It stores its result into DATA, which points to a
343 type_change_info structure. */
344
345 static bool
346 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
347 {
348 gimple stmt = SSA_NAME_DEF_STMT (vdef);
349 struct type_change_info *tci = (struct type_change_info *) data;
350
351 if (stmt_may_be_vtbl_ptr_store (stmt))
352 {
353 tci->type_maybe_changed = true;
354 return true;
355 }
356 else
357 return false;
358 }
359
360 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
361 looking for assignments to its virtual table pointer. If it is, return true
362 and fill in the jump function JFUNC with relevant type information or set it
363 to unknown. ARG is the object itself (not a pointer to it, unless
364 dereferenced). BASE is the base of the memory access as returned by
365 get_ref_base_and_extent, as is the offset. */
366
367 static bool
368 detect_type_change (tree arg, tree base, gimple call,
369 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
370 {
371 struct type_change_info tci;
372 ao_ref ao;
373
374 gcc_checking_assert (DECL_P (arg)
375 || TREE_CODE (arg) == MEM_REF
376 || handled_component_p (arg));
377 /* Const calls cannot call virtual methods through VMT and so type changes do
378 not matter. */
379 if (!flag_devirtualize || !gimple_vuse (call))
380 return false;
381
382 tci.type_maybe_changed = false;
383
384 ao.ref = arg;
385 ao.base = base;
386 ao.offset = offset;
387 ao.size = POINTER_SIZE;
388 ao.max_size = ao.size;
389 ao.ref_alias_set = -1;
390 ao.base_alias_set = -1;
391
392 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
393 &tci, NULL);
394 if (!tci.type_maybe_changed)
395 return false;
396
397 jfunc->type = IPA_JF_UNKNOWN;
398 return true;
399 }
400
401 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
402 SSA name (its dereference will become the base and the offset is assumed to
403 be zero). */
404
405 static bool
406 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
407 {
408 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
409 if (!flag_devirtualize
410 || !POINTER_TYPE_P (TREE_TYPE (arg))
411 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
412 return false;
413
414 arg = build2 (MEM_REF, ptr_type_node, arg,
415 build_int_cst (ptr_type_node, 0));
416
417 return detect_type_change (arg, arg, call, jfunc, 0);
418 }
419
420
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to find out whether NAME can be
   described by a (possibly polynomial) pass-through jump-function or an
   ancestor jump function and if so, write the appropriate function into
   JFUNC.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, op2, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  /* Case 1: the first operand is directly an unmodified (default
     definition) parameter — a pass-through candidate.  */
  if (TREE_CODE (op1) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (op1))
    {
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      if (index < 0)
	return;

      if (op2)
	{
	  /* Binary operation: only describable if the second operand is an
	     interprocedural invariant and, except for comparisons, the
	     result type matches the parameter type.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
	  jfunc->value.pass_through.operand = op2;
	}
      else if (gimple_assign_unary_nop_p (stmt)
	       && !detect_type_change_ssa (op1, call, jfunc))
	{
	  /* Plain copy or no-op conversion with no detected dynamic type
	     change: a simple pass-through.  */
	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = NOP_EXPR;
	}
      return;
    }

  /* Case 2: NAME is the address of a component within a parameter — an
     ancestor jump function candidate.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  /* Fold the constant offset of the MEM_REF itself into OFFSET (bits).  */
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors, so
     check that no such change may have happened before the call.
     (NOTE(review): the original comment was truncated here.)  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    {
      jfunc->type = IPA_JF_ANCESTOR;
      jfunc->value.ancestor.formal_id = index;
      jfunc->value.ancestor.offset = offset;
      jfunc->value.ancestor.type = TREE_TYPE (op1);
    }
}
498
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  /* Strip the ADDR_EXPR and decompose the reference underneath it.  */
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  /* The MEM_REF base must be a default-definition SSA name of a
     PARM_DECL.  */
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Add the constant offset of the MEM_REF itself, converted to bits.  */
  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
542
543
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  /* The PHI must merge exactly two values: a null pointer and the
     adjusted address.  */
  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  /* The non-null value must be a computed (non-default-def) pointer to a
     record.  */
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* Its defining statement must look like tmp = &parm->D.xxxx; with PARM
     a default definition of a formal parameter.  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  /* The block computing the address must be guarded by PARM != 0.  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must belong to the guarded diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  /* Create the ancestor jump function unless the dynamic type of the
     object may have changed before the call.  */
  if (!detect_type_change (obj, expr, call, jfunc, offset))
    {
      jfunc->type = IPA_JF_ANCESTOR;
      jfunc->value.ancestor.formal_id = index;
      jfunc->value.ancestor.offset = offset;
      jfunc->value.ancestor.type = TREE_TYPE (obj);
    }
}
627
628 /* Given OP which is passed as an actual argument to a called function,
629 determine if it is possible to construct a KNOWN_TYPE jump function for it
630 and if so, create one and store it to JFUNC. */
631
632 static void
633 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
634 gimple call)
635 {
636 HOST_WIDE_INT offset, size, max_size;
637 tree base, binfo;
638
639 if (!flag_devirtualize
640 || TREE_CODE (op) != ADDR_EXPR
641 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
642 return;
643
644 op = TREE_OPERAND (op, 0);
645 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
646 if (!DECL_P (base)
647 || max_size == -1
648 || max_size != size
649 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
650 || is_global_var (base))
651 return;
652
653 if (detect_type_change (op, base, call, jfunc, offset))
654 return;
655
656 binfo = TYPE_BINFO (TREE_TYPE (base));
657 if (!binfo)
658 return;
659 binfo = get_binfo_at_offset (binfo, offset, TREE_TYPE (op));
660 if (binfo)
661 {
662 jfunc->type = IPA_JF_KNOWN_TYPE;
663 jfunc->value.base_binfo = binfo;
664 }
665 }
666
667
668 /* Determine the jump functions of scalar arguments. Scalar means SSA names
669 and constants of a number of selected types. INFO is the ipa_node_params
670 structure associated with the caller, FUNCTIONS is a pointer to an array of
671 jump function structures associated with CALL which is the call statement
672 being examined.*/
673
674 static void
675 compute_scalar_jump_functions (struct ipa_node_params *info,
676 struct ipa_edge_args *args,
677 gimple call)
678 {
679 tree arg;
680 unsigned num = 0;
681
682 for (num = 0; num < gimple_call_num_args (call); num++)
683 {
684 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
685 arg = gimple_call_arg (call, num);
686
687 if (is_gimple_ip_invariant (arg))
688 {
689 jfunc->type = IPA_JF_CONST;
690 jfunc->value.constant = arg;
691 }
692 else if (TREE_CODE (arg) == SSA_NAME)
693 {
694 if (SSA_NAME_IS_DEFAULT_DEF (arg))
695 {
696 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
697
698 if (index >= 0
699 && !detect_type_change_ssa (arg, call, jfunc))
700 {
701 jfunc->type = IPA_JF_PASS_THROUGH;
702 jfunc->value.pass_through.formal_id = index;
703 jfunc->value.pass_through.operation = NOP_EXPR;
704 }
705 }
706 else
707 {
708 gimple stmt = SSA_NAME_DEF_STMT (arg);
709 if (is_gimple_assign (stmt))
710 compute_complex_assign_jump_func (info, jfunc, call, stmt, arg);
711 else if (gimple_code (stmt) == GIMPLE_PHI)
712 compute_complex_ancestor_jump_func (info, jfunc, call, stmt);
713 }
714 }
715 else
716 compute_known_type_jump_func (arg, jfunc, call);
717 }
718 }
719
720 /* Inspect the given TYPE and return true iff it has the same structure (the
721 same number of fields of the same types) as a C++ member pointer. If
722 METHOD_PTR and DELTA are non-NULL, store the trees representing the
723 corresponding fields there. */
724
725 static bool
726 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
727 {
728 tree fld;
729
730 if (TREE_CODE (type) != RECORD_TYPE)
731 return false;
732
733 fld = TYPE_FIELDS (type);
734 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
735 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
736 return false;
737
738 if (method_ptr)
739 *method_ptr = fld;
740
741 fld = DECL_CHAIN (fld);
742 if (!fld || INTEGRAL_TYPE_P (fld))
743 return false;
744 if (delta)
745 *delta = fld;
746
747 if (DECL_CHAIN (fld))
748 return false;
749
750 return true;
751 }
752
753 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
754 boolean variable pointed to by DATA. */
755
756 static bool
757 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
758 void *data)
759 {
760 bool *b = (bool *) data;
761 *b = true;
762 return true;
763 }
764
765 /* Return true if the formal parameter PARM might have been modified in this
766 function before reaching the statement CALL. PARM_INFO is a pointer to a
767 structure containing intermediate information about PARM. */
768
769 static bool
770 is_parm_modified_before_call (struct param_analysis_info *parm_info,
771 gimple call, tree parm)
772 {
773 bool modified = false;
774 ao_ref refd;
775
776 if (parm_info->modified)
777 return true;
778
779 ao_ref_init (&refd, parm);
780 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
781 &modified, &parm_info->visited_statements);
782 if (modified)
783 {
784 parm_info->modified = true;
785 return true;
786 }
787 return false;
788 }
789
790 /* Go through arguments of the CALL and for every one that looks like a member
791 pointer, check whether it can be safely declared pass-through and if so,
792 mark that to the corresponding item of jump FUNCTIONS. Return true iff
793 there are non-pass-through member pointers within the arguments. INFO
794 describes formal parameters of the caller. PARMS_INFO is a pointer to a
795 vector containing intermediate information about each formal parameter. */
796
797 static bool
798 compute_pass_through_member_ptrs (struct ipa_node_params *info,
799 struct param_analysis_info *parms_info,
800 struct ipa_edge_args *args,
801 gimple call)
802 {
803 bool undecided_members = false;
804 unsigned num;
805 tree arg;
806
807 for (num = 0; num < gimple_call_num_args (call); num++)
808 {
809 arg = gimple_call_arg (call, num);
810
811 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
812 {
813 if (TREE_CODE (arg) == PARM_DECL)
814 {
815 int index = ipa_get_param_decl_index (info, arg);
816
817 gcc_assert (index >=0);
818 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
819 {
820 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args,
821 num);
822 jfunc->type = IPA_JF_PASS_THROUGH;
823 jfunc->value.pass_through.formal_id = index;
824 jfunc->value.pass_through.operation = NOP_EXPR;
825 }
826 else
827 undecided_members = true;
828 }
829 else
830 undecided_members = true;
831 }
832 }
833
834 return undecided_members;
835 }
836
837 /* Simple function filling in a member pointer constant jump function (with PFN
838 and DELTA as the constant value) into JFUNC. */
839
840 static void
841 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
842 tree pfn, tree delta)
843 {
844 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
845 jfunc->value.member_cst.pfn = pfn;
846 jfunc->value.member_cst.delta = delta;
847 }
848
849 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
850 return the rhs of its defining statement. */
851
852 static inline tree
853 get_ssa_def_if_simple_copy (tree rhs)
854 {
855 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
856 {
857 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
858
859 if (gimple_assign_single_p (def_stmt))
860 rhs = gimple_assign_rhs1 (def_stmt);
861 else
862 break;
863 }
864 return rhs;
865 }
866
867 /* Traverse statements from CALL backwards, scanning whether the argument ARG
868 which is a member pointer is filled in with constant values. If it is, fill
869 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
870 fields of the record type of the member pointer. To give an example, we
871 look for a pattern looking like the following:
872
873 D.2515.__pfn ={v} printStuff;
874 D.2515.__delta ={v} 0;
875 i_1 = doprinting (D.2515); */
876
877 static void
878 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
879 tree delta_field, struct ipa_jump_func *jfunc)
880 {
881 gimple_stmt_iterator gsi;
882 tree method = NULL_TREE;
883 tree delta = NULL_TREE;
884
885 gsi = gsi_for_stmt (call);
886
887 gsi_prev (&gsi);
888 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
889 {
890 gimple stmt = gsi_stmt (gsi);
891 tree lhs, rhs, fld;
892
893 if (!stmt_may_clobber_ref_p (stmt, arg))
894 continue;
895 if (!gimple_assign_single_p (stmt))
896 return;
897
898 lhs = gimple_assign_lhs (stmt);
899 rhs = gimple_assign_rhs1 (stmt);
900
901 if (TREE_CODE (lhs) != COMPONENT_REF
902 || TREE_OPERAND (lhs, 0) != arg)
903 return;
904
905 fld = TREE_OPERAND (lhs, 1);
906 if (!method && fld == method_field)
907 {
908 rhs = get_ssa_def_if_simple_copy (rhs);
909 if (TREE_CODE (rhs) == ADDR_EXPR
910 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
911 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
912 {
913 method = TREE_OPERAND (rhs, 0);
914 if (delta)
915 {
916 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
917 return;
918 }
919 }
920 else
921 return;
922 }
923
924 if (!delta && fld == delta_field)
925 {
926 rhs = get_ssa_def_if_simple_copy (rhs);
927 if (TREE_CODE (rhs) == INTEGER_CST)
928 {
929 delta = rhs;
930 if (method)
931 {
932 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
933 return;
934 }
935 }
936 else
937 return;
938 }
939 }
940
941 return;
942 }
943
944 /* Go through the arguments of the CALL and for every member pointer within
945 tries determine whether it is a constant. If it is, create a corresponding
946 constant jump function in FUNCTIONS which is an array of jump functions
947 associated with the call. */
948
949 static void
950 compute_cst_member_ptr_arguments (struct ipa_edge_args *args,
951 gimple call)
952 {
953 unsigned num;
954 tree arg, method_field, delta_field;
955
956 for (num = 0; num < gimple_call_num_args (call); num++)
957 {
958 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
959 arg = gimple_call_arg (call, num);
960
961 if (jfunc->type == IPA_JF_UNKNOWN
962 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
963 &delta_field))
964 determine_cst_member_ptr (call, arg, method_field, delta_field, jfunc);
965 }
966 }
967
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int arg_num = gimple_call_num_args (call);

  /* Nothing to do for calls without arguments; also bail out when the jump
     functions for this edge have been computed already.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);

  /* We will deal with constants and SSA scalars first: */
  compute_scalar_jump_functions (info, args, call);

  /* Let's check whether there are any potential member pointers and if so,
     whether we can determine their functions as pass_through.  */
  if (!compute_pass_through_member_ptrs (info, parms_info, args, call))
    return;

  /* Finally, let's check whether we actually pass a new constant member
     pointer here...  */
  compute_cst_member_ptr_arguments (args, call);
}
997
998 /* Compute jump functions for all edges - both direct and indirect - outgoing
999 from NODE. Also count the actual arguments in the process. */
1000
1001 static void
1002 ipa_compute_jump_functions (struct cgraph_node *node,
1003 struct param_analysis_info *parms_info)
1004 {
1005 struct cgraph_edge *cs;
1006
1007 for (cs = node->callees; cs; cs = cs->next_callee)
1008 {
1009 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1010 NULL);
1011 /* We do not need to bother analyzing calls to unknown
1012 functions unless they may become known during lto/whopr. */
1013 if (!callee->analyzed && !flag_lto)
1014 continue;
1015 ipa_compute_jump_functions_for_edge (parms_info, cs);
1016 }
1017
1018 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1019 ipa_compute_jump_functions_for_edge (parms_info, cs);
1020 }
1021
/* If RHS looks like a rhs of a statement loading pfn from a member
   pointer formal parameter, return the parameter, otherwise return
   NULL.  If USE_DELTA, then we look for a use of the delta field
   rather than the pfn.  */

static tree
ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
{
  tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;

  /* Peel off a COMPONENT_REF, remembering which field was accessed.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  /* The access must be based directly on the address of a PARM_DECL whose
     type has the member-pointer layout.  */
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;

  ref_offset = TREE_OPERAND (rhs, 1);

  if (ref_field)
    {
      /* Field accessed through COMPONENT_REF: the MEM_REF offset must be
	 zero and the field must be the one we are looking for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;

      if (use_delta)
	fld = delta_field;
      else
	fld = ptr_field;

      return ref_field == fld ? rec : NULL_TREE;
    }

  /* Direct MEM_REF access: compare its offset with the byte position of
     the requested field.  */
  if (use_delta)
    fld_offset = byte_position (delta_field);
  else
    fld_offset = byte_position (ptr_field);

  return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
}
1071
1072 /* If STMT looks like a statement loading a value from a member pointer formal
1073 parameter, this function returns that parameter. */
1074
1075 static tree
1076 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1077 {
1078 tree rhs;
1079
1080 if (!gimple_assign_single_p (stmt))
1081 return NULL_TREE;
1082
1083 rhs = gimple_assign_rhs1 (stmt);
1084 return ipa_get_member_ptr_load_param (rhs, use_delta);
1085 }
1086
1087 /* Returns true iff T is an SSA_NAME defined by a statement. */
1088
1089 static bool
1090 ipa_is_ssa_with_stmt_def (tree t)
1091 {
1092 if (TREE_CODE (t) == SSA_NAME
1093 && !SSA_NAME_IS_DEFAULT_DEF (t))
1094 return true;
1095 else
1096 return false;
1097 }
1098
1099 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1100 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1101 indirect call graph edge. */
1102
1103 static struct cgraph_edge *
1104 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1105 {
1106 struct cgraph_edge *cs;
1107
1108 cs = cgraph_edge (node, stmt);
1109 cs->indirect_info->param_index = param_index;
1110 cs->indirect_info->anc_offset = 0;
1111 cs->indirect_info->polymorphic = 0;
1112 return cs;
1113 }
1114
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_INFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }
  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_info,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;

  /* The easy case: the call target is the default definition of an ordinary
     pointer parameter, i.e. the parameter itself.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */

  if (!POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* The called value must be merged by a two-argument PHI (the virtual and
     the non-virtual branch of the pattern above).  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_code (def) != GIMPLE_PHI)
    return;

  if (gimple_phi_num_args (def) != 2)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  /* Exactly one of the two PHI arguments must be a load of the pfn field of
     a member-pointer parameter; the other block is the virtual lookup.  */
  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  /* The condition must be (something & 1) != 0 (or == 0).  */
  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion of the tested value (the "(int)" cast
     in the example above).  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* Depending on the target ABI, the virtual-bit lives either in the pfn or
     in the delta field; the tested value must load the right one from the
     very same parameter we found above.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta));

  if (rec != rec2)
    return;

  /* Only record the call if the member pointer parameter is not modified
     before the call statement.  */
  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
						   call, rec))
    ipa_note_param_call (node, index, call);

  return;
}
1294
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement. */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
			       struct ipa_node_params *info, gimple call,
			       tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  /* Recording devirtualization opportunities is pointless when the
     optimization is disabled.  */
  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the parameter itself.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* If the dynamic type may have changed since function entry, the
	 parameter's static type tells us nothing - give up.  */
      if (detect_type_change_ssa (obj, call, &jfunc))
	return;
    }
  else
    {
      /* Otherwise the object may be the address of an ancestor (base class
	 at a constant offset) of a parameter.  */
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
	return;
    }

  /* Record the call and fill in the information needed to devirtualize it
     later: ancestor offset, OBJ_TYPE_REF token and the OTR type.  */
  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->anc_offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
1350
1351 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1352 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1353 containing intermediate information about each formal parameter. */
1354
1355 static void
1356 ipa_analyze_call_uses (struct cgraph_node *node,
1357 struct ipa_node_params *info,
1358 struct param_analysis_info *parms_info, gimple call)
1359 {
1360 tree target = gimple_call_fn (call);
1361
1362 if (!target)
1363 return;
1364 if (TREE_CODE (target) == SSA_NAME)
1365 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1366 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1367 ipa_analyze_virtual_call_uses (node, info, call, target);
1368 }
1369
1370
1371 /* Analyze the call statement STMT with respect to formal parameters (described
1372 in INFO) of caller given by NODE. Currently it only checks whether formal
1373 parameters are called. PARMS_INFO is a pointer to a vector containing
1374 intermediate information about each formal parameter. */
1375
1376 static void
1377 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1378 struct param_analysis_info *parms_info, gimple stmt)
1379 {
1380 if (is_gimple_call (stmt))
1381 ipa_analyze_call_uses (node, info, parms_info, stmt);
1382 }
1383
1384 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1385 If OP is a parameter declaration, mark it as used in the info structure
1386 passed in DATA. */
1387
1388 static bool
1389 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1390 tree op, void *data)
1391 {
1392 struct ipa_node_params *info = (struct ipa_node_params *) data;
1393
1394 op = get_base_address (op);
1395 if (op
1396 && TREE_CODE (op) == PARM_DECL)
1397 {
1398 int index = ipa_get_param_decl_index (info, op);
1399 gcc_assert (index >= 0);
1400 ipa_set_param_used (info, index, true);
1401 }
1402
1403 return false;
1404 }
1405
1406 /* Scan the function body of NODE and inspect the uses of formal parameters.
1407 Store the findings in various structures of the associated ipa_node_params
1408 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1409 vector containing intermediate information about each formal parameter. */
1410
1411 static void
1412 ipa_analyze_params_uses (struct cgraph_node *node,
1413 struct param_analysis_info *parms_info)
1414 {
1415 tree decl = node->decl;
1416 basic_block bb;
1417 struct function *func;
1418 gimple_stmt_iterator gsi;
1419 struct ipa_node_params *info = IPA_NODE_REF (node);
1420 int i;
1421
1422 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1423 return;
1424
1425 for (i = 0; i < ipa_get_param_count (info); i++)
1426 {
1427 tree parm = ipa_get_param (info, i);
1428 /* For SSA regs see if parameter is used. For non-SSA we compute
1429 the flag during modification analysis. */
1430 if (is_gimple_reg (parm)
1431 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1432 ipa_set_param_used (info, i, true);
1433 }
1434
1435 func = DECL_STRUCT_FUNCTION (decl);
1436 FOR_EACH_BB_FN (bb, func)
1437 {
1438 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1439 {
1440 gimple stmt = gsi_stmt (gsi);
1441
1442 if (is_gimple_debug (stmt))
1443 continue;
1444
1445 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1446 walk_stmt_load_store_addr_ops (stmt, info,
1447 visit_ref_for_mod_analysis,
1448 visit_ref_for_mod_analysis,
1449 visit_ref_for_mod_analysis);
1450 }
1451 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1452 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1453 visit_ref_for_mod_analysis,
1454 visit_ref_for_mod_analysis,
1455 visit_ref_for_mod_analysis);
1456 }
1457
1458 info->uses_analysis_done = 1;
1459 }
1460
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_info;
  int i, param_count;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  /* The analysis below walks the function body, so NODE's function must be
     the current one.  */
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  current_function_decl = node->decl;
  ipa_initialize_node_params (node);

  /* Per-parameter scratch data used only during this analysis; allocated on
     the stack and zero-initialized.  */
  param_count = ipa_get_param_count (info);
  parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_info);
  ipa_compute_jump_functions (node, parms_info);

  /* Release the visited-statement bitmaps the analyses may have built.  */
  for (i = 0; i < param_count; i++)
    if (parms_info[i].visited_statements)
      BITMAP_FREE (parms_info[i].visited_statements);

  current_function_decl = NULL;
  pop_cfun ();
}
1493
1494
1495 /* Update the jump function DST when the call graph edge corresponding to SRC is
1496 is being inlined, knowing that DST is of type ancestor and src of known
1497 type. */
1498
1499 static void
1500 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1501 struct ipa_jump_func *dst)
1502 {
1503 tree new_binfo;
1504
1505 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1506 dst->value.ancestor.offset,
1507 dst->value.ancestor.type);
1508 if (new_binfo)
1509 {
1510 dst->type = IPA_JF_KNOWN_TYPE;
1511 dst->value.base_binfo = new_binfo;
1512 }
1513 else
1514 dst->type = IPA_JF_UNKNOWN;
1515 }
1516
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  /* Compose DST (ancestor) with the jump function of the inlined
	     call's corresponding argument.  */
	  src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    /* ancestor o simple pass-through: just retarget the formal.  */
	    dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* ancestor o ancestor: offsets accumulate.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      /* A simple pass-through is replaced wholesale by the inlined
		 call's jump function for that argument.  */
	      src = ipa_get_ith_jump_func (top,
					   dst->value.pass_through.formal_id);
	      *dst = *src;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
1579
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;

  /* Strip an optional ADDR_EXPR wrapper; only genuine FUNCTION_DECL targets
     with an existing cgraph node qualify.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    return NULL;
  callee = cgraph_get_node (target);
  if (!callee)
    return NULL;
  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  cgraph_make_edge_direct (ie, callee);
  if (dump_file)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       cgraph_node_name (ie->caller), ie->caller->uid,
	       cgraph_node_name (ie->callee), ie->callee->uid);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  /* NOTE(review): the result of this alias/thunk resolution is never used -
     the assignment looks dead; confirm whether a later consumer was intended
     before removing it.  */
  callee = cgraph_function_or_thunk_node (callee, NULL);

  return ie;
}
1618
1619 /* Try to find a destination for indirect edge IE that corresponds to a simple
1620 call or a call of a member function pointer and where the destination is a
1621 pointer formal parameter described by jump function JFUNC. If it can be
1622 determined, return the newly direct edge, otherwise return NULL. */
1623
1624 static struct cgraph_edge *
1625 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1626 struct ipa_jump_func *jfunc)
1627 {
1628 tree target;
1629
1630 if (jfunc->type == IPA_JF_CONST)
1631 target = jfunc->value.constant;
1632 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1633 target = jfunc->value.member_cst.pfn;
1634 else
1635 return NULL;
1636
1637 return ipa_make_edge_direct_to_target (ie, target);
1638 }
1639
1640 /* Try to find a destination for indirect edge IE that corresponds to a
1641 virtual call based on a formal parameter which is described by jump
1642 function JFUNC and if it can be determined, make it direct and return the
1643 direct edge. Otherwise, return NULL. */
1644
1645 static struct cgraph_edge *
1646 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1647 struct ipa_jump_func *jfunc)
1648 {
1649 tree binfo, type, target;
1650 HOST_WIDE_INT token;
1651
1652 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1653 binfo = jfunc->value.base_binfo;
1654 else
1655 return NULL;
1656
1657 if (!binfo)
1658 return NULL;
1659
1660 token = ie->indirect_info->otr_token;
1661 type = ie->indirect_info->otr_type;
1662 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1663 if (binfo)
1664 target = gimple_get_virt_method_for_binfo (token, binfo);
1665 else
1666 return NULL;
1667
1668 if (target)
1669 return ipa_make_edge_direct_to_target (ie, target);
1670 else
1671 return NULL;
1672 }
1673
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      VEC (cgraph_edge_p, heap) **new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);

  /* Making an edge direct can remove it from the indirect list, so fetch the
     next edge up front.  */
  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;

      next_ie = ie->next_callee;

      /* -1 means this indirect call is not based on any formal parameter.  */
      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      /* Translate the parameter index through CS's jump function so it is
	 expressed in terms of CS->caller's formal parameters.  */
      jfunc = ipa_get_ith_jump_func (top, ici->param_index);
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && jfunc->value.pass_through.operation == NOP_EXPR)
	ici->param_index = jfunc->value.pass_through.formal_id;
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  ici->param_index = jfunc->value.ancestor.formal_id;
	  ici->anc_offset += jfunc->value.ancestor.offset;
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;

      if (!flag_indirect_inlining)
	continue;

      if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);

      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      VEC_safe_push (cgraph_edge_p, heap, *new_edges,
			     new_direct_edge);
	      /* Re-fetch TOP: creating the edge may have grown
		 ipa_edge_args_vector and invalidated the pointer.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
    }

  return res;
}
1745
1746 /* Recursively traverse subtree of NODE (including node) made of inlined
1747 cgraph_edges when CS has been inlined and invoke
1748 update_indirect_edges_after_inlining on all nodes and
1749 update_jump_functions_after_inlining on all non-inlined edges that lead out
1750 of this subtree. Newly discovered indirect edges will be added to
1751 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1752 created. */
1753
1754 static bool
1755 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1756 struct cgraph_node *node,
1757 VEC (cgraph_edge_p, heap) **new_edges)
1758 {
1759 struct cgraph_edge *e;
1760 bool res;
1761
1762 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1763
1764 for (e = node->callees; e; e = e->next_callee)
1765 if (!e->inline_failed)
1766 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1767 else
1768 update_jump_functions_after_inlining (cs, e);
1769 for (e = node->indirect_calls; e; e = e->next_callee)
1770 update_jump_functions_after_inlining (cs, e);
1771
1772 return res;
1773 }
1774
1775 /* Update jump functions and call note functions on inlining the call site CS.
1776 CS is expected to lead to a node already cloned by
1777 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1778 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
1779 created. */
1780
1781 bool
1782 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1783 VEC (cgraph_edge_p, heap) **new_edges)
1784 {
1785 bool changed;
1786 /* Do nothing if the preparation phase has not been carried out yet
1787 (i.e. during early inlining). */
1788 if (!ipa_node_params_vector)
1789 return false;
1790 gcc_assert (ipa_edge_args_vector);
1791
1792 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1793
1794 /* We do not keep jump functions of inlined edges up to date. Better to free
1795 them so we do not access them accidentally. */
1796 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1797 return changed;
1798 }
1799
/* Frees all dynamically allocated structures that the argument info points
   to.  Resets ARGS to all zeros afterwards so it can be reused.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  /* The jump function vector is GC-allocated; release it explicitly.  */
  if (args->jump_functions)
    ggc_free (args->jump_functions);

  memset (args, 0, sizeof (*args));
}
1811
1812 /* Free all ipa_edge structures. */
1813
1814 void
1815 ipa_free_all_edge_args (void)
1816 {
1817 int i;
1818 struct ipa_edge_args *args;
1819
1820 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1821 ipa_free_edge_args_substructures (args);
1822
1823 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1824 ipa_edge_args_vector = NULL;
1825 }
1826
/* Frees all dynamically allocated structures that the param info points
   to.  Resets INFO to all zeros afterwards so it can be reused.  */

void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  VEC_free (tree, heap, info->known_vals);
  memset (info, 0, sizeof (*info));
}
1840
1841 /* Free all ipa_node_params structures. */
1842
1843 void
1844 ipa_free_all_node_params (void)
1845 {
1846 int i;
1847 struct ipa_node_params *info;
1848
1849 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1850 ipa_free_node_params_substructures (info);
1851
1852 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1853 ipa_node_params_vector = NULL;
1854 }
1855
/* Hook that is called by cgraph.c when an edge is removed.  Frees the
   associated ipa_edge_args data, if any exist.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones; they
     have no entry in the vector, so there is nothing to free.  */
  if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
      <= (unsigned)cs->uid)
    return;
  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
1867
/* Hook that is called by cgraph.c when a node is removed.  Frees the
   associated ipa_node_params data, if any exist.  */

static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones; they
     have no entry in the vector, so there is nothing to free.  */
  if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
      <= (unsigned)node->uid)
    return;
  ipa_free_node_params_substructures (IPA_NODE_REF (node));
}
1879
/* Hook that is called by cgraph.c when an edge is duplicated.  Copies the
   jump functions of SRC's ipa_edge_args to DST's.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;

  /* Make sure the vector is large enough for both edges.  */
  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
				       old_args->jump_functions);
}
1896
/* Hook that is called by cgraph.c when a node is duplicated.  Copies the
   parameter descriptors and relevant flags from SRC's ipa_node_params to
   DST's; propagation lattices are not copied and start out empty.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;

  /* Make sure the vector is large enough for both nodes.  */
  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
				    old_info->descriptors);
  /* Lattices are recomputed for the clone, not shared.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->uses_analysis_done = old_info->uses_analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
}
1917
1918
/* Analyze newly added function into callgraph.  Hook registered via
   cgraph_add_function_insertion_hook; DATA is unused.  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  ipa_analyze_node (node);
}
1926
1927 /* Register our cgraph hooks if they are not already there. */
1928
1929 void
1930 ipa_register_cgraph_hooks (void)
1931 {
1932 if (!edge_removal_hook_holder)
1933 edge_removal_hook_holder =
1934 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1935 if (!node_removal_hook_holder)
1936 node_removal_hook_holder =
1937 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1938 if (!edge_duplication_hook_holder)
1939 edge_duplication_hook_holder =
1940 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
1941 if (!node_duplication_hook_holder)
1942 node_duplication_hook_holder =
1943 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
1944 function_insertion_hook_holder =
1945 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
1946 }
1947
/* Unregister our cgraph hooks and clear the holder pointers.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
1964
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  When optimizing, the structures are kept for
   the later indirect-inlining phase (see
   ipa_free_all_structures_after_iinln).  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      /* NOTE(review): unlike the _after_iinln variant, the pools are freed
	 without a NULL check here - presumably they are always created when
	 this path is taken; confirm before relying on it.  */
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_values_pool);
      ipa_unregister_cgraph_hooks ();
    }
}
1980
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  /* The pools may not have been created (e.g. when IPA-CP did not run).  */
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
}
1995
/* Print ipa_tree_map data structures of function NODE to F.  */

void
ipa_print_node_params (FILE * f, struct cgraph_node *node)
{
  int i, count;
  tree temp;
  struct ipa_node_params *info;

  /* Nothing to print for functions that have not been analyzed.  */
  if (!node->analyzed)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n",
	   cgraph_node_name (node));
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      temp = ipa_get_param (info, i);
      if (TREE_CODE (temp) == PARM_DECL)
	fprintf (f, "    param %d : %s", i,
		 (DECL_NAME (temp)
		  ? (*lang_hooks.decl_printable_name) (temp, 2)
		  : "(unnamed)"));
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      fprintf (f, "\n");
    }
}
2025
2026 /* Print ipa_tree_map data structures of all functions in the
2027 callgraph to F. */
2028
2029 void
2030 ipa_print_all_params (FILE * f)
2031 {
2032 struct cgraph_node *node;
2033
2034 fprintf (f, "\nFunction parameters:\n");
2035 for (node = cgraph_nodes; node; node = node->next)
2036 ipa_print_node_params (f, node);
2037 }
2038
/* Return a heap allocated vector containing formal parameters of FNDECL.
   The caller is responsible for freeing the vector.  */

VEC(tree, heap) *
ipa_get_vector_of_formal_parms (tree fndecl)
{
  VEC(tree, heap) *args;
  int count;
  tree parm;

  /* Pre-size the vector so the quick_push below cannot overflow it.  */
  count = count_formal_params (fndecl);
  args = VEC_alloc (tree, heap, count);
  /* Collect the parameter decls in declaration order.  */
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    VEC_quick_push (tree, args, parm);

  return args;
}
2055
2056 /* Return a heap allocated vector containing types of formal parameters of
2057 function type FNTYPE. */
2058
2059 static inline VEC(tree, heap) *
2060 get_vector_of_formal_parm_types (tree fntype)
2061 {
2062 VEC(tree, heap) *types;
2063 int count = 0;
2064 tree t;
2065
2066 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2067 count++;
2068
2069 types = VEC_alloc (tree, heap, count);
2070 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2071 VEC_quick_push (tree, types, TREE_VALUE (t));
2072
2073 return types;
2074 }
2075
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  SYNTH_PARM_PREFIX, which may be NULL, names synthesized
   replacement parameters.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  VEC(tree, heap) *oparms, *otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      /* A trailing void in the arg-type list marks a non-varargs function,
	 in which case OTYPES has one more element than OPARMS.  */
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
      else
	gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
    }
  else
    {
      last_parm_void = false;
      otypes = NULL;
    }

  /* Walk the adjustments, rebuilding DECL_ARGUMENTS through LINK and
     accumulating the new TYPE_ARG_TYPES list in reverse order.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      parm = VEC_index (tree, oparms, adj->base_index);
      adj->base = parm;

      if (adj->copy_param)
	{
	  /* The original parameter is kept as-is.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
							     adj->base_index),
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  /* The parameter is replaced: synthesize a brand new PARM_DECL of
	     the adjusted type (a pointer to it when passed by reference).  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  add_referenced_var (new_parm);
	  mark_sym_for_renaming (new_parm);
	  /* Record both the original parameter and its replacement in the
	     adjustment for later consumers.  */
	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &DECL_CHAIN (new_parm);
	}
      /* Removed parameters (adj->remove_param) simply contribute nothing to
	 the new chain or type list.  */
    }

  /* Terminate the rebuilt argument chain.  */
  *link = NULL_TREE;

  if (care_for_types)
    {
      /* The new arg types were accumulated in reverse; restore source order
	 and re-attach the trailing void marking a non-varargs list.  */
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
	  && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      /* ORIG_TYPE was itself a variant; splice NEW_TYPE into the variant
	 chain of the main variant.  */
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  if (otypes)
    VEC_free (tree, heap, otypes);
  VEC_free (tree, heap, oparms);
}
2235
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  STMT is the call statement
   being replaced; a new call statement is built and substituted for it.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  VEC(tree, heap) *vargs;
  VEC(tree, gc) **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  tree callee_decl;
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  vargs = VEC_alloc (tree, heap, len);
  /* For a directly recursive call (CS == NULL) take the callee from the
     statement itself, otherwise from the call graph edge.  */
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;

  gsi = gsi_for_stmt (stmt);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->copy_param)
	{
	  /* Pass the original argument through unchanged.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  VEC_quick_push (tree, vargs, arg);
	}
      else if (!adj->remove_param)
	{
	  tree expr, base, off;
	  location_t loc;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* The original argument already is a pointer; offset from it
	       directly.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;

	      if (TREE_CODE (base) == ADDR_EXPR)
		base = TREE_OPERAND (base, 0);
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  /* Fold the MEM_REF's own offset into OFF and reuse its
		     base pointer.  */
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	  if (adj->by_ref)
	    expr = build_fold_addr_expr (expr);

	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  VEC_quick_push (tree, vargs, expr);
	}
      if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
	{
	  /* The argument is removed or transformed; try to keep its original
	     value visible to the debugger by binding it to a DEBUG_EXPR_DECL
	     registered in the callee's debug args.  */
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* Debug args are stored as (origin, debug decl) pairs; look for an
	     existing entry for ORIGIN.  */
	  for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = VEC_index (tree, *debug_args, ix + 1);
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      VEC_safe_push (tree, gc, *debug_args, origin);
	      VEC_safe_push (tree, gc, *debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
					      stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  /* Build the replacement call, preserving the lhs, block, location, call
     flags and static chain of the original statement.  */
  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
2411
2412 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2413
2414 static bool
2415 index_in_adjustments_multiple_times_p (int base_index,
2416 ipa_parm_adjustment_vec adjustments)
2417 {
2418 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2419 bool one = false;
2420
2421 for (i = 0; i < len; i++)
2422 {
2423 struct ipa_parm_adjustment *adj;
2424 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2425
2426 if (adj->base_index == base_index)
2427 {
2428 if (one)
2429 return true;
2430 else
2431 one = true;
2432 }
2433 }
2434 return false;
2435 }
2436
2437
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  The returned vector is heap
   allocated; the caller owns it.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
  int inlen = VEC_length (ipa_parm_adjustment_t, inner);
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP holds the inner adjustments that survived (were not removals), so
     that OUTER's base_index values, which refer to the post-INNER parameter
     numbering, can be mapped back to INNER entries by plain indexing.  */
  tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = VEC_index (ipa_parm_adjustment_t, inner, i);

      if (n->remove_param)
	removals++;
      else
	VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
    }

  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment *r;
      struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
						   outer, i);
      struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
						  out->base_index);

      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* Emit a single combined removal per original parameter; a
	     parameter referenced by several inner adjustments must not be
	     removed more than once.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (r, 0, sizeof (*r));
	      r->remove_param = true;
	    }
	  continue;
	}

      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
      memset (r, 0, sizeof (*r));
      r->base_index = in->base_index;
      r->type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      /* Combine the offsets: a plain copy contributes nothing, otherwise the
	 offsets of the two adjustments accumulate.  */
      if (in->copy_param && out->copy_param)
	r->copy_param = true;
      else if (in->copy_param)
	r->offset = out->offset;
      else if (out->copy_param)
	r->offset = in->offset;
      else
	r->offset = in->offset + out->offset;
    }

  /* Removals performed by INNER are carried over unchanged.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
						 inner, i);

      if (n->remove_param)
	VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
    }

  VEC_free (ipa_parm_adjustment_t, heap, tmp);
  return adjustments;
}
2513
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  bool first = true;
  VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      /* Indent every entry after the first so they line up under the
	 header.  */
      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
      /* The base, reduction and new_ssa_base fields are optional; print
	 them only when set.  */
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->reduction)
	{
	  fprintf (file, ", reduction: ");
	  print_generic_expr (file, adj->reduction, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->copy_param)
	fprintf (file, ", copy_param");
      else if (adj->remove_param)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  VEC_free (tree, heap, parms);
}
2567
/* Stream out jump function JUMP_FUNC to OB.  The layout written here must be
   kept in sync with ipa_read_jump_function below.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  /* The discriminating type tag goes first, then the type-specific
     payload.  */
  streamer_write_uhwi (ob, jump_func->type);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      stream_write_tree (ob, jump_func->value.base_binfo, true);
      break;
    case IPA_JF_CONST:
      stream_write_tree (ob, jump_func->value.constant, true);
      break;
    case IPA_JF_PASS_THROUGH:
      stream_write_tree (ob, jump_func->value.pass_through.operand, true);
      streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      stream_write_tree (ob, jump_func->value.member_cst.pfn, true);
      stream_write_tree (ob, jump_func->value.member_cst.delta, false);
      break;
    }
}
2602
/* Read in jump function JUMP_FUNC from IB, using DATA_IN for tree
   streaming.  The layout must mirror ipa_write_jump_function above.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct data_in *data_in)
{
  /* Read the type tag first, then the type-specific payload.  */
  jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      jump_func->value.base_binfo = stream_read_tree (ib, data_in);
      break;
    case IPA_JF_CONST:
      jump_func->value.constant = stream_read_tree (ib, data_in);
      break;
    case IPA_JF_PASS_THROUGH:
      jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
      jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
      jump_func->value.pass_through.operation
	= (enum tree_code) streamer_read_uhwi (ib);
      break;
    case IPA_JF_ANCESTOR:
      jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
      jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
      jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      jump_func->value.member_cst.pfn = stream_read_tree (ib, data_in);
      jump_func->value.member_cst.delta = stream_read_tree (ib, data_in);
      break;
    }
}
2639
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Must be kept in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->anc_offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  streamer_write_bitpack (&bp);

  /* The token and type are only streamed for polymorphic calls; the reader
     skips them otherwise.  */
  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
    }
}
2662
2663 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2664 relevant to indirect inlining from IB. */
2665
2666 static void
2667 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2668 struct data_in *data_in ATTRIBUTE_UNUSED,
2669 struct cgraph_edge *cs)
2670 {
2671 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2672 struct bitpack_d bp;
2673
2674 ii->param_index = (int) streamer_read_hwi (ib);
2675 ii->anc_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
2676 bp = streamer_read_bitpack (ib);
2677 ii->polymorphic = bp_unpack_value (&bp, 1);
2678 if (ii->polymorphic)
2679 {
2680 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
2681 ii->otr_type = stream_read_tree (ib, data_in);
2682 }
2683 }
2684
/* Stream out NODE info to OB: a node reference, a bitpack of per-parameter
   used flags, and the jump functions of all direct and indirect call sites.
   Must be kept in sync with ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_cgraph_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->cgraph_node_encoder;
  node_ref = lto_cgraph_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  bp = bitpack_create (ob->main_stream);
  /* Only fully analyzed, not-enqueued, non-IPA-CP-clone nodes are expected
     here.  */
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Each edge streams its argument count followed by that many jump
	 functions.  */
      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      /* Indirect edges additionally carry indirect call info.  */
      ipa_write_indirect_edge_info (ob, e);
    }
}
2727
/* Stream in NODE info from IB, using DATA_IN for tree streaming.  Must
   mirror the layout produced by ipa_write_node_info.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_initialize_node_params (node);

  bp = streamer_read_bitpack (ib);
  /* The writer asserts uses_analysis_done unless there are no parameters,
     so restore the flag accordingly.  */
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      /* Edges without arguments have no jump functions to read.  */
      if (!count)
	continue;
      VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
				 count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
				    data_in);
	}
      /* Indirect edges carry indirect call info even when COUNT is zero.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
2775
/* Write jump functions for nodes in SET to the LTO jump functions section.
   A no-op when no IPA parameter info has been computed.  */

void
ipa_prop_write_jump_functions (cgraph_node_set set)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  if (!ipa_node_params_vector)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  ob->cgraph_node = NULL;
  /* First pass: count the nodes that will actually be streamed, so the
     count can be written ahead of the node records.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      /* The filter here must match the counting pass above exactly.  */
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
2813
/* Read section in file FILE_DATA of length LEN with data DATA.  Parses the
   section header, then reads the per-node records written by
   ipa_prop_write_jump_functions, and releases the section data.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* The section layout is: header, main stream, string table.  */
  const int32_t cfg_offset = sizeof (struct lto_function_header);
  const int32_t main_offset = cfg_offset + header->cfg_size;
  const int32_t string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  /* The writer streamed the node count first.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_cgraph_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->cgraph_node_encoder;
      node = lto_cgraph_encoder_deref (encoder, index);
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
2854
2855 /* Read ipcp jump functions. */
2856
2857 void
2858 ipa_prop_read_jump_functions (void)
2859 {
2860 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2861 struct lto_file_decl_data *file_data;
2862 unsigned int j = 0;
2863
2864 ipa_check_create_node_params ();
2865 ipa_check_create_edge_args ();
2866 ipa_register_cgraph_hooks ();
2867
2868 while ((file_data = file_data_vec[j++]))
2869 {
2870 size_t len;
2871 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2872
2873 if (data)
2874 ipa_prop_read_section (file_data, data, len);
2875 }
2876 }
2877
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  struct cgraph_node *node;

  /* Make sure the per-node and per-edge info vectors exist before touching
     them.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  /* Re-derive parameter descriptors for every analyzed node from the merged
     decls.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed)
      ipa_initialize_node_params (node);
}