/* Interprocedural analyses.
   Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
25 #include "langhooks.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41 #include "data-streamer.h"
42 #include "tree-streamer.h"
45 /* Intermediate information about a parameter that is only useful during the
46 run of ipa_analyze_node and is not kept afterwards. */
48 struct param_analysis_info
51 bitmap visited_statements
;
54 /* Vector where the parameter infos are actually stored. */
55 VEC (ipa_node_params_t
, heap
) *ipa_node_params_vector
;
56 /* Vector where the parameter infos are actually stored. */
57 VEC (ipa_edge_args_t
, gc
) *ipa_edge_args_vector
;
/* Holders of ipa cgraph hooks, registered when the pass starts and released
   when it finishes: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
66 /* Return index of the formal whose tree is PTREE in function which corresponds
70 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
74 count
= ipa_get_param_count (info
);
75 for (i
= 0; i
< count
; i
++)
76 if (ipa_get_param (info
, i
) == ptree
)
82 /* Populate the param_decl field in parameter descriptors of INFO that
83 corresponds to NODE. */
86 ipa_populate_param_decls (struct cgraph_node
*node
,
87 struct ipa_node_params
*info
)
95 fnargs
= DECL_ARGUMENTS (fndecl
);
97 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
99 VEC_index (ipa_param_descriptor_t
,
100 info
->descriptors
, param_num
)->decl
= parm
;
105 /* Return how many formal parameters FNDECL has. */
108 count_formal_params (tree fndecl
)
113 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
119 /* Initialize the ipa_node_params structure associated with NODE by counting
120 the function parameters, creating the descriptors and populating their
124 ipa_initialize_node_params (struct cgraph_node
*node
)
126 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
128 if (!info
->descriptors
)
132 param_count
= count_formal_params (node
->decl
);
135 VEC_safe_grow_cleared (ipa_param_descriptor_t
, heap
,
136 info
->descriptors
, param_count
);
137 ipa_populate_param_decls (node
, info
);
142 /* Print the jump functions associated with call graph edge CS to file F. */
145 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
149 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
150 for (i
= 0; i
< count
; i
++)
152 struct ipa_jump_func
*jump_func
;
153 enum jump_func_type type
;
155 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
156 type
= jump_func
->type
;
158 fprintf (f
, " param %d: ", i
);
159 if (type
== IPA_JF_UNKNOWN
)
160 fprintf (f
, "UNKNOWN\n");
161 else if (type
== IPA_JF_KNOWN_TYPE
)
163 tree binfo_type
= TREE_TYPE (jump_func
->value
.base_binfo
);
164 fprintf (f
, "KNOWN TYPE, type in binfo is: ");
165 print_generic_expr (f
, binfo_type
, 0);
166 fprintf (f
, " (%u)\n", TYPE_UID (binfo_type
));
168 else if (type
== IPA_JF_CONST
)
170 tree val
= jump_func
->value
.constant
;
171 fprintf (f
, "CONST: ");
172 print_generic_expr (f
, val
, 0);
173 if (TREE_CODE (val
) == ADDR_EXPR
174 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
177 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
182 else if (type
== IPA_JF_CONST_MEMBER_PTR
)
184 fprintf (f
, "CONST MEMBER PTR: ");
185 print_generic_expr (f
, jump_func
->value
.member_cst
.pfn
, 0);
187 print_generic_expr (f
, jump_func
->value
.member_cst
.delta
, 0);
190 else if (type
== IPA_JF_PASS_THROUGH
)
192 fprintf (f
, "PASS THROUGH: ");
193 fprintf (f
, "%d, op %s ",
194 jump_func
->value
.pass_through
.formal_id
,
196 jump_func
->value
.pass_through
.operation
]);
197 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
198 print_generic_expr (f
,
199 jump_func
->value
.pass_through
.operand
, 0);
202 else if (type
== IPA_JF_ANCESTOR
)
204 fprintf (f
, "ANCESTOR: ");
205 fprintf (f
, "%d, offset "HOST_WIDE_INT_PRINT_DEC
", ",
206 jump_func
->value
.ancestor
.formal_id
,
207 jump_func
->value
.ancestor
.offset
);
208 print_generic_expr (f
, jump_func
->value
.ancestor
.type
, 0);
215 /* Print the jump functions of all arguments on all call graph edges going from
219 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
221 struct cgraph_edge
*cs
;
224 fprintf (f
, " Jump functions of caller %s:\n", cgraph_node_name (node
));
225 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
227 if (!ipa_edge_args_info_available_for_edge_p (cs
))
230 fprintf (f
, " callsite %s/%i -> %s/%i : \n",
231 cgraph_node_name (node
), node
->uid
,
232 cgraph_node_name (cs
->callee
), cs
->callee
->uid
);
233 ipa_print_node_jump_functions_for_edge (f
, cs
);
236 for (cs
= node
->indirect_calls
, i
= 0; cs
; cs
= cs
->next_callee
, i
++)
238 if (!ipa_edge_args_info_available_for_edge_p (cs
))
243 fprintf (f
, " indirect callsite %d for stmt ", i
);
244 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
247 fprintf (f
, " indirect callsite %d :\n", i
);
248 ipa_print_node_jump_functions_for_edge (f
, cs
);
253 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
256 ipa_print_all_jump_functions (FILE *f
)
258 struct cgraph_node
*node
;
260 fprintf (f
, "\nJump functions:\n");
261 for (node
= cgraph_nodes
; node
; node
= node
->next
)
263 ipa_print_node_jump_functions (f
, node
);
267 /* Structure to be passed in between detect_type_change and
268 check_stmt_for_type_change. */
270 struct type_change_info
272 /* Set to true if dynamic type change has been detected. */
273 bool type_maybe_changed
;
276 /* Return true if STMT can modify a virtual method table pointer.
278 This function makes special assumptions about both constructors and
279 destructors which are all the functions that are allowed to alter the VMT
280 pointers. It assumes that destructors begin with assignment into all VMT
281 pointers and that constructors essentially look in the following way:
283 1) The very first thing they do is that they call constructors of ancestor
284 sub-objects that have them.
286 2) Then VMT pointers of this and all its ancestors is set to new values
287 corresponding to the type corresponding to the constructor.
289 3) Only afterwards, other stuff such as constructor of member sub-objects
290 and the code written by the user is run. Only this may include calling
291 virtual functions, directly or indirectly.
293 There is no way to call a constructor of an ancestor sub-object in any
296 This means that we do not have to care whether constructors get the correct
297 type information because they will always change it (in fact, if we define
298 the type to be given by the VMT pointer, it is undefined).
300 The most important fact to derive from the above is that if, for some
301 statement in the section 3, we try to detect whether the dynamic type has
302 changed, we can safely ignore all calls as we examine the function body
303 backwards until we reach statements in section 2 because these calls cannot
304 be ancestor constructors or destructors (if the input is not bogus) and so
305 do not change the dynamic type (this holds true only for automatically
306 allocated objects but at the moment we devirtualize only these). We then
307 must detect that statements in section 2 change the dynamic type and can try
308 to derive the new type. That is enough and we can stop, we will never see
309 the calls into constructors of sub-objects in this code. Therefore we can
310 safely ignore all call statements that we traverse.
314 stmt_may_be_vtbl_ptr_store (gimple stmt
)
316 if (is_gimple_call (stmt
))
318 else if (is_gimple_assign (stmt
))
320 tree lhs
= gimple_assign_lhs (stmt
);
322 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
324 if (flag_strict_aliasing
325 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
328 if (TREE_CODE (lhs
) == COMPONENT_REF
329 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
331 /* In the future we might want to use get_base_ref_and_offset to find
332 if there is a field corresponding to the offset and if so, proceed
333 almost like if it was a component ref. */
339 /* Callback of walk_aliased_vdefs and a helper function for
340 detect_type_change to check whether a particular statement may modify
341 the virtual table pointer, and if possible also determine the new type of
342 the (sub-)object. It stores its result into DATA, which points to a
343 type_change_info structure. */
346 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
348 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
349 struct type_change_info
*tci
= (struct type_change_info
*) data
;
351 if (stmt_may_be_vtbl_ptr_store (stmt
))
353 tci
->type_maybe_changed
= true;
360 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
361 looking for assignments to its virtual table pointer. If it is, return true
362 and fill in the jump function JFUNC with relevant type information or set it
363 to unknown. ARG is the object itself (not a pointer to it, unless
364 dereferenced). BASE is the base of the memory access as returned by
365 get_ref_base_and_extent, as is the offset. */
368 detect_type_change (tree arg
, tree base
, gimple call
,
369 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
371 struct type_change_info tci
;
374 gcc_checking_assert (DECL_P (arg
)
375 || TREE_CODE (arg
) == MEM_REF
376 || handled_component_p (arg
));
377 /* Const calls cannot call virtual methods through VMT and so type changes do
379 if (!flag_devirtualize
|| !gimple_vuse (call
))
382 tci
.type_maybe_changed
= false;
387 ao
.size
= POINTER_SIZE
;
388 ao
.max_size
= ao
.size
;
389 ao
.ref_alias_set
= -1;
390 ao
.base_alias_set
= -1;
392 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
394 if (!tci
.type_maybe_changed
)
397 jfunc
->type
= IPA_JF_UNKNOWN
;
401 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
402 SSA name (its dereference will become the base and the offset is assumed to
406 detect_type_change_ssa (tree arg
, gimple call
, struct ipa_jump_func
*jfunc
)
408 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
409 if (!flag_devirtualize
410 || !POINTER_TYPE_P (TREE_TYPE (arg
))
411 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != RECORD_TYPE
)
414 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
415 build_int_cst (ptr_type_node
, 0));
417 return detect_type_change (arg
, arg
, call
, jfunc
, 0);
421 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
422 of an assignment statement STMT, try to find out whether NAME can be
423 described by a (possibly polynomial) pass-through jump-function or an
424 ancestor jump function and if so, write the appropriate function into
428 compute_complex_assign_jump_func (struct ipa_node_params
*info
,
429 struct ipa_jump_func
*jfunc
,
430 gimple call
, gimple stmt
, tree name
)
432 HOST_WIDE_INT offset
, size
, max_size
;
433 tree op1
, op2
, base
, ssa
;
436 op1
= gimple_assign_rhs1 (stmt
);
437 op2
= gimple_assign_rhs2 (stmt
);
439 if (TREE_CODE (op1
) == SSA_NAME
440 && SSA_NAME_IS_DEFAULT_DEF (op1
))
442 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
448 if (!is_gimple_ip_invariant (op2
)
449 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
450 && !useless_type_conversion_p (TREE_TYPE (name
),
454 jfunc
->type
= IPA_JF_PASS_THROUGH
;
455 jfunc
->value
.pass_through
.formal_id
= index
;
456 jfunc
->value
.pass_through
.operation
= gimple_assign_rhs_code (stmt
);
457 jfunc
->value
.pass_through
.operand
= op2
;
459 else if (gimple_assign_unary_nop_p (stmt
)
460 && !detect_type_change_ssa (op1
, call
, jfunc
))
462 jfunc
->type
= IPA_JF_PASS_THROUGH
;
463 jfunc
->value
.pass_through
.formal_id
= index
;
464 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
469 if (TREE_CODE (op1
) != ADDR_EXPR
)
471 op1
= TREE_OPERAND (op1
, 0);
472 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
474 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
475 if (TREE_CODE (base
) != MEM_REF
476 /* If this is a varying address, punt. */
480 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
481 ssa
= TREE_OPERAND (base
, 0);
482 if (TREE_CODE (ssa
) != SSA_NAME
483 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
487 /* Dynamic types are changed only in constructors and destructors and */
488 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
490 && !detect_type_change (op1
, base
, call
, jfunc
, offset
))
492 jfunc
->type
= IPA_JF_ANCESTOR
;
493 jfunc
->value
.ancestor
.formal_id
= index
;
494 jfunc
->value
.ancestor
.offset
= offset
;
495 jfunc
->value
.ancestor
.type
= TREE_TYPE (op1
);
499 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
502 iftmp.1_3 = &obj_2(D)->D.1762;
504 The base of the MEM_REF must be a default definition SSA NAME of a
505 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
506 whole MEM_REF expression is returned and the offset calculated from any
507 handled components and the MEM_REF itself is stored into *OFFSET. The whole
508 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
511 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
513 HOST_WIDE_INT size
, max_size
;
514 tree expr
, parm
, obj
;
516 if (!gimple_assign_single_p (assign
))
518 expr
= gimple_assign_rhs1 (assign
);
520 if (TREE_CODE (expr
) != ADDR_EXPR
)
522 expr
= TREE_OPERAND (expr
, 0);
524 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
526 if (TREE_CODE (expr
) != MEM_REF
527 /* If this is a varying address, punt. */
532 parm
= TREE_OPERAND (expr
, 0);
533 if (TREE_CODE (parm
) != SSA_NAME
534 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
535 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
538 *offset
+= mem_ref_offset (expr
).low
* BITS_PER_UNIT
;
544 /* Given that an actual argument is an SSA_NAME that is a result of a phi
545 statement PHI, try to find out whether NAME is in fact a
546 multiple-inheritance typecast from a descendant into an ancestor of a formal
547 parameter and thus can be described by an ancestor jump function and if so,
548 write the appropriate function into JFUNC.
550 Essentially we want to match the following pattern:
558 iftmp.1_3 = &obj_2(D)->D.1762;
561 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
562 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
566 compute_complex_ancestor_jump_func (struct ipa_node_params
*info
,
567 struct ipa_jump_func
*jfunc
,
568 gimple call
, gimple phi
)
570 HOST_WIDE_INT offset
;
572 basic_block phi_bb
, assign_bb
, cond_bb
;
573 tree tmp
, parm
, expr
, obj
;
576 if (gimple_phi_num_args (phi
) != 2)
579 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
580 tmp
= PHI_ARG_DEF (phi
, 0);
581 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
582 tmp
= PHI_ARG_DEF (phi
, 1);
585 if (TREE_CODE (tmp
) != SSA_NAME
586 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
587 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
588 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
591 assign
= SSA_NAME_DEF_STMT (tmp
);
592 assign_bb
= gimple_bb (assign
);
593 if (!single_pred_p (assign_bb
))
595 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
598 parm
= TREE_OPERAND (expr
, 0);
599 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
600 gcc_assert (index
>= 0);
602 cond_bb
= single_pred (assign_bb
);
603 cond
= last_stmt (cond_bb
);
605 || gimple_code (cond
) != GIMPLE_COND
606 || gimple_cond_code (cond
) != NE_EXPR
607 || gimple_cond_lhs (cond
) != parm
608 || !integer_zerop (gimple_cond_rhs (cond
)))
611 phi_bb
= gimple_bb (phi
);
612 for (i
= 0; i
< 2; i
++)
614 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
615 if (pred
!= assign_bb
&& pred
!= cond_bb
)
619 if (!detect_type_change (obj
, expr
, call
, jfunc
, offset
))
621 jfunc
->type
= IPA_JF_ANCESTOR
;
622 jfunc
->value
.ancestor
.formal_id
= index
;
623 jfunc
->value
.ancestor
.offset
= offset
;
624 jfunc
->value
.ancestor
.type
= TREE_TYPE (obj
);
628 /* Given OP which is passed as an actual argument to a called function,
629 determine if it is possible to construct a KNOWN_TYPE jump function for it
630 and if so, create one and store it to JFUNC. */
633 compute_known_type_jump_func (tree op
, struct ipa_jump_func
*jfunc
,
636 HOST_WIDE_INT offset
, size
, max_size
;
639 if (!flag_devirtualize
640 || TREE_CODE (op
) != ADDR_EXPR
641 || TREE_CODE (TREE_TYPE (TREE_TYPE (op
))) != RECORD_TYPE
)
644 op
= TREE_OPERAND (op
, 0);
645 base
= get_ref_base_and_extent (op
, &offset
, &size
, &max_size
);
649 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
650 || is_global_var (base
))
653 if (detect_type_change (op
, base
, call
, jfunc
, offset
))
656 binfo
= TYPE_BINFO (TREE_TYPE (base
));
659 binfo
= get_binfo_at_offset (binfo
, offset
, TREE_TYPE (op
));
662 jfunc
->type
= IPA_JF_KNOWN_TYPE
;
663 jfunc
->value
.base_binfo
= binfo
;
668 /* Determine the jump functions of scalar arguments. Scalar means SSA names
669 and constants of a number of selected types. INFO is the ipa_node_params
670 structure associated with the caller, FUNCTIONS is a pointer to an array of
671 jump function structures associated with CALL which is the call statement
675 compute_scalar_jump_functions (struct ipa_node_params
*info
,
676 struct ipa_edge_args
*args
,
682 for (num
= 0; num
< gimple_call_num_args (call
); num
++)
684 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, num
);
685 arg
= gimple_call_arg (call
, num
);
687 if (is_gimple_ip_invariant (arg
))
689 jfunc
->type
= IPA_JF_CONST
;
690 jfunc
->value
.constant
= arg
;
692 else if (TREE_CODE (arg
) == SSA_NAME
)
694 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
696 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
699 && !detect_type_change_ssa (arg
, call
, jfunc
))
701 jfunc
->type
= IPA_JF_PASS_THROUGH
;
702 jfunc
->value
.pass_through
.formal_id
= index
;
703 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
708 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
709 if (is_gimple_assign (stmt
))
710 compute_complex_assign_jump_func (info
, jfunc
, call
, stmt
, arg
);
711 else if (gimple_code (stmt
) == GIMPLE_PHI
)
712 compute_complex_ancestor_jump_func (info
, jfunc
, call
, stmt
);
716 compute_known_type_jump_func (arg
, jfunc
, call
);
720 /* Inspect the given TYPE and return true iff it has the same structure (the
721 same number of fields of the same types) as a C++ member pointer. If
722 METHOD_PTR and DELTA are non-NULL, store the trees representing the
723 corresponding fields there. */
726 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
730 if (TREE_CODE (type
) != RECORD_TYPE
)
733 fld
= TYPE_FIELDS (type
);
734 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
735 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
)
741 fld
= DECL_CHAIN (fld
);
742 if (!fld
|| INTEGRAL_TYPE_P (fld
))
747 if (DECL_CHAIN (fld
))
753 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
754 boolean variable pointed to by DATA. */
757 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
760 bool *b
= (bool *) data
;
765 /* Return true if the formal parameter PARM might have been modified in this
766 function before reaching the statement CALL. PARM_INFO is a pointer to a
767 structure containing intermediate information about PARM. */
770 is_parm_modified_before_call (struct param_analysis_info
*parm_info
,
771 gimple call
, tree parm
)
773 bool modified
= false;
776 if (parm_info
->modified
)
779 ao_ref_init (&refd
, parm
);
780 walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
,
781 &modified
, &parm_info
->visited_statements
);
784 parm_info
->modified
= true;
790 /* Go through arguments of the CALL and for every one that looks like a member
791 pointer, check whether it can be safely declared pass-through and if so,
792 mark that to the corresponding item of jump FUNCTIONS. Return true iff
793 there are non-pass-through member pointers within the arguments. INFO
794 describes formal parameters of the caller. PARMS_INFO is a pointer to a
795 vector containing intermediate information about each formal parameter. */
798 compute_pass_through_member_ptrs (struct ipa_node_params
*info
,
799 struct param_analysis_info
*parms_info
,
800 struct ipa_edge_args
*args
,
803 bool undecided_members
= false;
807 for (num
= 0; num
< gimple_call_num_args (call
); num
++)
809 arg
= gimple_call_arg (call
, num
);
811 if (type_like_member_ptr_p (TREE_TYPE (arg
), NULL
, NULL
))
813 if (TREE_CODE (arg
) == PARM_DECL
)
815 int index
= ipa_get_param_decl_index (info
, arg
);
817 gcc_assert (index
>=0);
818 if (!is_parm_modified_before_call (&parms_info
[index
], call
, arg
))
820 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
,
822 jfunc
->type
= IPA_JF_PASS_THROUGH
;
823 jfunc
->value
.pass_through
.formal_id
= index
;
824 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
827 undecided_members
= true;
830 undecided_members
= true;
834 return undecided_members
;
837 /* Simple function filling in a member pointer constant jump function (with PFN
838 and DELTA as the constant value) into JFUNC. */
841 fill_member_ptr_cst_jump_function (struct ipa_jump_func
*jfunc
,
842 tree pfn
, tree delta
)
844 jfunc
->type
= IPA_JF_CONST_MEMBER_PTR
;
845 jfunc
->value
.member_cst
.pfn
= pfn
;
846 jfunc
->value
.member_cst
.delta
= delta
;
849 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
850 return the rhs of its defining statement. */
853 get_ssa_def_if_simple_copy (tree rhs
)
855 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
857 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
859 if (gimple_assign_single_p (def_stmt
))
860 rhs
= gimple_assign_rhs1 (def_stmt
);
867 /* Traverse statements from CALL backwards, scanning whether the argument ARG
868 which is a member pointer is filled in with constant values. If it is, fill
869 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
870 fields of the record type of the member pointer. To give an example, we
871 look for a pattern looking like the following:
873 D.2515.__pfn ={v} printStuff;
874 D.2515.__delta ={v} 0;
875 i_1 = doprinting (D.2515); */
878 determine_cst_member_ptr (gimple call
, tree arg
, tree method_field
,
879 tree delta_field
, struct ipa_jump_func
*jfunc
)
881 gimple_stmt_iterator gsi
;
882 tree method
= NULL_TREE
;
883 tree delta
= NULL_TREE
;
885 gsi
= gsi_for_stmt (call
);
888 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
890 gimple stmt
= gsi_stmt (gsi
);
893 if (!stmt_may_clobber_ref_p (stmt
, arg
))
895 if (!gimple_assign_single_p (stmt
))
898 lhs
= gimple_assign_lhs (stmt
);
899 rhs
= gimple_assign_rhs1 (stmt
);
901 if (TREE_CODE (lhs
) != COMPONENT_REF
902 || TREE_OPERAND (lhs
, 0) != arg
)
905 fld
= TREE_OPERAND (lhs
, 1);
906 if (!method
&& fld
== method_field
)
908 rhs
= get_ssa_def_if_simple_copy (rhs
);
909 if (TREE_CODE (rhs
) == ADDR_EXPR
910 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == FUNCTION_DECL
911 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs
, 0))) == METHOD_TYPE
)
913 method
= TREE_OPERAND (rhs
, 0);
916 fill_member_ptr_cst_jump_function (jfunc
, rhs
, delta
);
924 if (!delta
&& fld
== delta_field
)
926 rhs
= get_ssa_def_if_simple_copy (rhs
);
927 if (TREE_CODE (rhs
) == INTEGER_CST
)
932 fill_member_ptr_cst_jump_function (jfunc
, rhs
, delta
);
944 /* Go through the arguments of the CALL and for every member pointer within
945 tries determine whether it is a constant. If it is, create a corresponding
946 constant jump function in FUNCTIONS which is an array of jump functions
947 associated with the call. */
950 compute_cst_member_ptr_arguments (struct ipa_edge_args
*args
,
954 tree arg
, method_field
, delta_field
;
956 for (num
= 0; num
< gimple_call_num_args (call
); num
++)
958 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, num
);
959 arg
= gimple_call_arg (call
, num
);
961 if (jfunc
->type
== IPA_JF_UNKNOWN
962 && type_like_member_ptr_p (TREE_TYPE (arg
), &method_field
,
964 determine_cst_member_ptr (call
, arg
, method_field
, delta_field
, jfunc
);
968 /* Compute jump function for all arguments of callsite CS and insert the
969 information in the jump_functions array in the ipa_edge_args corresponding
973 ipa_compute_jump_functions_for_edge (struct param_analysis_info
*parms_info
,
974 struct cgraph_edge
*cs
)
976 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
977 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
978 gimple call
= cs
->call_stmt
;
979 int arg_num
= gimple_call_num_args (call
);
981 if (arg_num
== 0 || args
->jump_functions
)
983 VEC_safe_grow_cleared (ipa_jump_func_t
, gc
, args
->jump_functions
, arg_num
);
985 /* We will deal with constants and SSA scalars first: */
986 compute_scalar_jump_functions (info
, args
, call
);
988 /* Let's check whether there are any potential member pointers and if so,
989 whether we can determine their functions as pass_through. */
990 if (!compute_pass_through_member_ptrs (info
, parms_info
, args
, call
))
993 /* Finally, let's check whether we actually pass a new constant member
995 compute_cst_member_ptr_arguments (args
, call
);
998 /* Compute jump functions for all edges - both direct and indirect - outgoing
999 from NODE. Also count the actual arguments in the process. */
1002 ipa_compute_jump_functions (struct cgraph_node
*node
,
1003 struct param_analysis_info
*parms_info
)
1005 struct cgraph_edge
*cs
;
1007 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1009 struct cgraph_node
*callee
= cgraph_function_or_thunk_node (cs
->callee
,
1011 /* We do not need to bother analyzing calls to unknown
1012 functions unless they may become known during lto/whopr. */
1013 if (!callee
->analyzed
&& !flag_lto
)
1015 ipa_compute_jump_functions_for_edge (parms_info
, cs
);
1018 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
1019 ipa_compute_jump_functions_for_edge (parms_info
, cs
);
1022 /* If RHS looks like a rhs of a statement loading pfn from a member
1023 pointer formal parameter, return the parameter, otherwise return
1024 NULL. If USE_DELTA, then we look for a use of the delta field
1025 rather than the pfn. */
1028 ipa_get_member_ptr_load_param (tree rhs
, bool use_delta
)
1030 tree rec
, ref_field
, ref_offset
, fld
, fld_offset
, ptr_field
, delta_field
;
1032 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1034 ref_field
= TREE_OPERAND (rhs
, 1);
1035 rhs
= TREE_OPERAND (rhs
, 0);
1038 ref_field
= NULL_TREE
;
1039 if (TREE_CODE (rhs
) != MEM_REF
)
1041 rec
= TREE_OPERAND (rhs
, 0);
1042 if (TREE_CODE (rec
) != ADDR_EXPR
)
1044 rec
= TREE_OPERAND (rec
, 0);
1045 if (TREE_CODE (rec
) != PARM_DECL
1046 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1049 ref_offset
= TREE_OPERAND (rhs
, 1);
1053 if (integer_nonzerop (ref_offset
))
1061 return ref_field
== fld
? rec
: NULL_TREE
;
1065 fld_offset
= byte_position (delta_field
);
1067 fld_offset
= byte_position (ptr_field
);
1069 return tree_int_cst_equal (ref_offset
, fld_offset
) ? rec
: NULL_TREE
;
1072 /* If STMT looks like a statement loading a value from a member pointer formal
1073 parameter, this function returns that parameter. */
1076 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
)
1080 if (!gimple_assign_single_p (stmt
))
1083 rhs
= gimple_assign_rhs1 (stmt
);
1084 return ipa_get_member_ptr_load_param (rhs
, use_delta
);
1087 /* Returns true iff T is an SSA_NAME defined by a statement. */
1090 ipa_is_ssa_with_stmt_def (tree t
)
1092 if (TREE_CODE (t
) == SSA_NAME
1093 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1099 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1100 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1101 indirect call graph edge. */
1103 static struct cgraph_edge
*
1104 ipa_note_param_call (struct cgraph_node
*node
, int param_index
, gimple stmt
)
1106 struct cgraph_edge
*cs
;
1108 cs
= cgraph_edge (node
, stmt
);
1109 cs
->indirect_info
->param_index
= param_index
;
1110 cs
->indirect_info
->anc_offset
= 0;
1111 cs
->indirect_info
->polymorphic
= 0;
1115 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1116 (described by INFO). PARMS_INFO is a pointer to a vector containing
1117 intermediate information about each formal parameter. Currently it checks
1118 whether the call calls a pointer that is a formal parameter and if so, the
1119 parameter is marked with the called flag and an indirect call graph edge
1120 describing the call is created. This is very simple for ordinary pointers
1121 represented in SSA but not-so-nice when it comes to member pointers. The
1122 ugly part of this function does nothing more than trying to match the
1123 pattern of such a call. An example of such a pattern is the gimple dump
1124 below, the call is on the last line:
1127 f$__delta_5 = f.__delta;
1128 f$__pfn_24 = f.__pfn;
1132 f$__delta_5 = MEM[(struct *)&f];
1133 f$__pfn_24 = MEM[(struct *)&f + 4B];
1135 and a few lines below:
1138 D.2496_3 = (int) f$__pfn_24;
1139 D.2497_4 = D.2496_3 & 1;
1146 D.2500_7 = (unsigned int) f$__delta_5;
1147 D.2501_8 = &S + D.2500_7;
1148 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1149 D.2503_10 = *D.2502_9;
1150 D.2504_12 = f$__pfn_24 + -1;
1151 D.2505_13 = (unsigned int) D.2504_12;
1152 D.2506_14 = D.2503_10 + D.2505_13;
1153 D.2507_15 = *D.2506_14;
1154 iftmp.11_16 = (String:: *) D.2507_15;
1157 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1158 D.2500_19 = (unsigned int) f$__delta_5;
1159 D.2508_20 = &S + D.2500_19;
1160 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1162 Such patterns are results of simple calls to a member pointer:
1164 int doprinting (int (MyString::* f)(int) const)
1166 MyString S ("somestring");
1173 ipa_analyze_indirect_call_uses (struct cgraph_node
*node
,
1174 struct ipa_node_params
*info
,
1175 struct param_analysis_info
*parms_info
,
1176 gimple call
, tree target
)
1181 tree rec
, rec2
, cond
;
1184 basic_block bb
, virt_bb
, join
;
1186 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1188 tree var
= SSA_NAME_VAR (target
);
1189 index
= ipa_get_param_decl_index (info
, var
);
1191 ipa_note_param_call (node
, index
, call
);
1195 /* Now we need to try to match the complex pattern of calling a member
1198 if (!POINTER_TYPE_P (TREE_TYPE (target
))
1199 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
1202 def
= SSA_NAME_DEF_STMT (target
);
1203 if (gimple_code (def
) != GIMPLE_PHI
)
1206 if (gimple_phi_num_args (def
) != 2)
1209 /* First, we need to check whether one of these is a load from a member
1210 pointer that is a parameter to this function. */
1211 n1
= PHI_ARG_DEF (def
, 0);
1212 n2
= PHI_ARG_DEF (def
, 1);
1213 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
1215 d1
= SSA_NAME_DEF_STMT (n1
);
1216 d2
= SSA_NAME_DEF_STMT (n2
);
1218 join
= gimple_bb (def
);
1219 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false)))
1221 if (ipa_get_stmt_member_ptr_load_param (d2
, false))
1224 bb
= EDGE_PRED (join
, 0)->src
;
1225 virt_bb
= gimple_bb (d2
);
1227 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false)))
1229 bb
= EDGE_PRED (join
, 1)->src
;
1230 virt_bb
= gimple_bb (d1
);
1235 /* Second, we need to check that the basic blocks are laid out in the way
1236 corresponding to the pattern. */
1238 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
1239 || single_pred (virt_bb
) != bb
1240 || single_succ (virt_bb
) != join
)
1243 /* Third, let's see that the branching is done depending on the least
1244 significant bit of the pfn. */
1246 branch
= last_stmt (bb
);
1247 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
1250 if ((gimple_cond_code (branch
) != NE_EXPR
1251 && gimple_cond_code (branch
) != EQ_EXPR
)
1252 || !integer_zerop (gimple_cond_rhs (branch
)))
1255 cond
= gimple_cond_lhs (branch
);
1256 if (!ipa_is_ssa_with_stmt_def (cond
))
1259 def
= SSA_NAME_DEF_STMT (cond
);
1260 if (!is_gimple_assign (def
)
1261 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
1262 || !integer_onep (gimple_assign_rhs2 (def
)))
1265 cond
= gimple_assign_rhs1 (def
);
1266 if (!ipa_is_ssa_with_stmt_def (cond
))
1269 def
= SSA_NAME_DEF_STMT (cond
);
1271 if (is_gimple_assign (def
)
1272 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
1274 cond
= gimple_assign_rhs1 (def
);
1275 if (!ipa_is_ssa_with_stmt_def (cond
))
1277 def
= SSA_NAME_DEF_STMT (cond
);
1280 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
1281 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1282 == ptrmemfunc_vbit_in_delta
));
1287 index
= ipa_get_param_decl_index (info
, rec
);
1288 if (index
>= 0 && !is_parm_modified_before_call (&parms_info
[index
],
1290 ipa_note_param_call (node
, index
, call
);
1295 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1296 object referenced in the expression is a formal parameter of the caller
1297 (described by INFO), create a call note for the statement. */
1300 ipa_analyze_virtual_call_uses (struct cgraph_node
*node
,
1301 struct ipa_node_params
*info
, gimple call
,
1304 struct cgraph_edge
*cs
;
1305 struct cgraph_indirect_call_info
*ii
;
1306 struct ipa_jump_func jfunc
;
1307 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
1309 HOST_WIDE_INT anc_offset
;
1311 if (!flag_devirtualize
)
1314 if (TREE_CODE (obj
) != SSA_NAME
)
1317 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
1319 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
1323 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
1324 gcc_assert (index
>= 0);
1325 if (detect_type_change_ssa (obj
, call
, &jfunc
))
1330 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
1333 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
1336 index
= ipa_get_param_decl_index (info
,
1337 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
1338 gcc_assert (index
>= 0);
1339 if (detect_type_change (obj
, expr
, call
, &jfunc
, anc_offset
))
1343 cs
= ipa_note_param_call (node
, index
, call
);
1344 ii
= cs
->indirect_info
;
1345 ii
->anc_offset
= anc_offset
;
1346 ii
->otr_token
= tree_low_cst (OBJ_TYPE_REF_TOKEN (target
), 1);
1347 ii
->otr_type
= TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target
)));
1348 ii
->polymorphic
= 1;
1351 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1352 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1353 containing intermediate information about each formal parameter. */
1356 ipa_analyze_call_uses (struct cgraph_node
*node
,
1357 struct ipa_node_params
*info
,
1358 struct param_analysis_info
*parms_info
, gimple call
)
1360 tree target
= gimple_call_fn (call
);
1364 if (TREE_CODE (target
) == SSA_NAME
)
1365 ipa_analyze_indirect_call_uses (node
, info
, parms_info
, call
, target
);
1366 else if (TREE_CODE (target
) == OBJ_TYPE_REF
)
1367 ipa_analyze_virtual_call_uses (node
, info
, call
, target
);
1371 /* Analyze the call statement STMT with respect to formal parameters (described
1372 in INFO) of caller given by NODE. Currently it only checks whether formal
1373 parameters are called. PARMS_INFO is a pointer to a vector containing
1374 intermediate information about each formal parameter. */
1377 ipa_analyze_stmt_uses (struct cgraph_node
*node
, struct ipa_node_params
*info
,
1378 struct param_analysis_info
*parms_info
, gimple stmt
)
1380 if (is_gimple_call (stmt
))
1381 ipa_analyze_call_uses (node
, info
, parms_info
, stmt
);
1384 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1385 If OP is a parameter declaration, mark it as used in the info structure
1389 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED
,
1390 tree op
, void *data
)
1392 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
1394 op
= get_base_address (op
);
1396 && TREE_CODE (op
) == PARM_DECL
)
1398 int index
= ipa_get_param_decl_index (info
, op
);
1399 gcc_assert (index
>= 0);
1400 ipa_set_param_used (info
, index
, true);
1406 /* Scan the function body of NODE and inspect the uses of formal parameters.
1407 Store the findings in various structures of the associated ipa_node_params
1408 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1409 vector containing intermediate information about each formal parameter. */
1412 ipa_analyze_params_uses (struct cgraph_node
*node
,
1413 struct param_analysis_info
*parms_info
)
1415 tree decl
= node
->decl
;
1417 struct function
*func
;
1418 gimple_stmt_iterator gsi
;
1419 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
1422 if (ipa_get_param_count (info
) == 0 || info
->uses_analysis_done
)
1425 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
1427 tree parm
= ipa_get_param (info
, i
);
1428 /* For SSA regs see if parameter is used. For non-SSA we compute
1429 the flag during modification analysis. */
1430 if (is_gimple_reg (parm
)
1431 && gimple_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
))
1432 ipa_set_param_used (info
, i
, true);
1435 func
= DECL_STRUCT_FUNCTION (decl
);
1436 FOR_EACH_BB_FN (bb
, func
)
1438 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1440 gimple stmt
= gsi_stmt (gsi
);
1442 if (is_gimple_debug (stmt
))
1445 ipa_analyze_stmt_uses (node
, info
, parms_info
, stmt
);
1446 walk_stmt_load_store_addr_ops (stmt
, info
,
1447 visit_ref_for_mod_analysis
,
1448 visit_ref_for_mod_analysis
,
1449 visit_ref_for_mod_analysis
);
1451 for (gsi
= gsi_start (phi_nodes (bb
)); !gsi_end_p (gsi
); gsi_next (&gsi
))
1452 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), info
,
1453 visit_ref_for_mod_analysis
,
1454 visit_ref_for_mod_analysis
,
1455 visit_ref_for_mod_analysis
);
1458 info
->uses_analysis_done
= 1;
1461 /* Initialize the array describing properties of of formal parameters
1462 of NODE, analyze their uses and compute jump functions associated
1463 with actual arguments of calls from within NODE. */
1466 ipa_analyze_node (struct cgraph_node
*node
)
1468 struct ipa_node_params
*info
;
1469 struct param_analysis_info
*parms_info
;
1472 ipa_check_create_node_params ();
1473 ipa_check_create_edge_args ();
1474 info
= IPA_NODE_REF (node
);
1475 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
1476 current_function_decl
= node
->decl
;
1477 ipa_initialize_node_params (node
);
1479 param_count
= ipa_get_param_count (info
);
1480 parms_info
= XALLOCAVEC (struct param_analysis_info
, param_count
);
1481 memset (parms_info
, 0, sizeof (struct param_analysis_info
) * param_count
);
1483 ipa_analyze_params_uses (node
, parms_info
);
1484 ipa_compute_jump_functions (node
, parms_info
);
1486 for (i
= 0; i
< param_count
; i
++)
1487 if (parms_info
[i
].visited_statements
)
1488 BITMAP_FREE (parms_info
[i
].visited_statements
);
1490 current_function_decl
= NULL
;
1495 /* Update the jump function DST when the call graph edge corresponding to SRC is
1496 is being inlined, knowing that DST is of type ancestor and src of known
1500 combine_known_type_and_ancestor_jfs (struct ipa_jump_func
*src
,
1501 struct ipa_jump_func
*dst
)
1505 new_binfo
= get_binfo_at_offset (src
->value
.base_binfo
,
1506 dst
->value
.ancestor
.offset
,
1507 dst
->value
.ancestor
.type
);
1510 dst
->type
= IPA_JF_KNOWN_TYPE
;
1511 dst
->value
.base_binfo
= new_binfo
;
1514 dst
->type
= IPA_JF_UNKNOWN
;
1517 /* Update the jump functions associated with call graph edge E when the call
1518 graph edge CS is being inlined, assuming that E->caller is already (possibly
1519 indirectly) inlined into CS->callee and that E has not been inlined. */
1522 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
1523 struct cgraph_edge
*e
)
1525 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
1526 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
1527 int count
= ipa_get_cs_argument_count (args
);
1530 for (i
= 0; i
< count
; i
++)
1532 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
1534 if (dst
->type
== IPA_JF_ANCESTOR
)
1536 struct ipa_jump_func
*src
;
1538 /* Variable number of arguments can cause havoc if we try to access
1539 one that does not exist in the inlined edge. So make sure we
1541 if (dst
->value
.ancestor
.formal_id
>= ipa_get_cs_argument_count (top
))
1543 dst
->type
= IPA_JF_UNKNOWN
;
1547 src
= ipa_get_ith_jump_func (top
, dst
->value
.ancestor
.formal_id
);
1548 if (src
->type
== IPA_JF_KNOWN_TYPE
)
1549 combine_known_type_and_ancestor_jfs (src
, dst
);
1550 else if (src
->type
== IPA_JF_PASS_THROUGH
1551 && src
->value
.pass_through
.operation
== NOP_EXPR
)
1552 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
1553 else if (src
->type
== IPA_JF_ANCESTOR
)
1555 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
1556 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
1559 dst
->type
= IPA_JF_UNKNOWN
;
1561 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
1563 struct ipa_jump_func
*src
;
1564 /* We must check range due to calls with variable number of arguments
1565 and we cannot combine jump functions with operations. */
1566 if (dst
->value
.pass_through
.operation
== NOP_EXPR
1567 && (dst
->value
.pass_through
.formal_id
1568 < ipa_get_cs_argument_count (top
)))
1570 src
= ipa_get_ith_jump_func (top
,
1571 dst
->value
.pass_through
.formal_id
);
1575 dst
->type
= IPA_JF_UNKNOWN
;
1580 /* If TARGET is an addr_expr of a function declaration, make it the destination
1581 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1583 struct cgraph_edge
*
1584 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
)
1586 struct cgraph_node
*callee
;
1588 if (TREE_CODE (target
) == ADDR_EXPR
)
1589 target
= TREE_OPERAND (target
, 0);
1590 if (TREE_CODE (target
) != FUNCTION_DECL
)
1592 callee
= cgraph_get_node (target
);
1595 ipa_check_create_node_params ();
1597 /* We can not make edges to inline clones. It is bug that someone removed
1598 the cgraph node too early. */
1599 gcc_assert (!callee
->global
.inlined_to
);
1601 cgraph_make_edge_direct (ie
, callee
);
1604 fprintf (dump_file
, "ipa-prop: Discovered %s call to a known target "
1605 "(%s/%i -> %s/%i), for stmt ",
1606 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
1607 cgraph_node_name (ie
->caller
), ie
->caller
->uid
,
1608 cgraph_node_name (ie
->callee
), ie
->callee
->uid
);
1610 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
1612 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
1614 callee
= cgraph_function_or_thunk_node (callee
, NULL
);
1619 /* Try to find a destination for indirect edge IE that corresponds to a simple
1620 call or a call of a member function pointer and where the destination is a
1621 pointer formal parameter described by jump function JFUNC. If it can be
1622 determined, return the newly direct edge, otherwise return NULL. */
1624 static struct cgraph_edge
*
1625 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
1626 struct ipa_jump_func
*jfunc
)
1630 if (jfunc
->type
== IPA_JF_CONST
)
1631 target
= jfunc
->value
.constant
;
1632 else if (jfunc
->type
== IPA_JF_CONST_MEMBER_PTR
)
1633 target
= jfunc
->value
.member_cst
.pfn
;
1637 return ipa_make_edge_direct_to_target (ie
, target
);
1640 /* Try to find a destination for indirect edge IE that corresponds to a
1641 virtual call based on a formal parameter which is described by jump
1642 function JFUNC and if it can be determined, make it direct and return the
1643 direct edge. Otherwise, return NULL. */
1645 static struct cgraph_edge
*
1646 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
1647 struct ipa_jump_func
*jfunc
)
1649 tree binfo
, type
, target
;
1650 HOST_WIDE_INT token
;
1652 if (jfunc
->type
== IPA_JF_KNOWN_TYPE
)
1653 binfo
= jfunc
->value
.base_binfo
;
1660 token
= ie
->indirect_info
->otr_token
;
1661 type
= ie
->indirect_info
->otr_type
;
1662 binfo
= get_binfo_at_offset (binfo
, ie
->indirect_info
->anc_offset
, type
);
1664 target
= gimple_get_virt_method_for_binfo (token
, binfo
);
1669 return ipa_make_edge_direct_to_target (ie
, target
);
1674 /* Update the param called notes associated with NODE when CS is being inlined,
1675 assuming NODE is (potentially indirectly) inlined into CS->callee.
1676 Moreover, if the callee is discovered to be constant, create a new cgraph
1677 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1678 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1681 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
1682 struct cgraph_node
*node
,
1683 VEC (cgraph_edge_p
, heap
) **new_edges
)
1685 struct ipa_edge_args
*top
;
1686 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
1689 ipa_check_create_edge_args ();
1690 top
= IPA_EDGE_REF (cs
);
1692 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
1694 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
1695 struct ipa_jump_func
*jfunc
;
1697 next_ie
= ie
->next_callee
;
1699 if (ici
->param_index
== -1)
1702 /* We must check range due to calls with variable number of arguments: */
1703 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
1705 ici
->param_index
= -1;
1709 jfunc
= ipa_get_ith_jump_func (top
, ici
->param_index
);
1710 if (jfunc
->type
== IPA_JF_PASS_THROUGH
1711 && jfunc
->value
.pass_through
.operation
== NOP_EXPR
)
1712 ici
->param_index
= jfunc
->value
.pass_through
.formal_id
;
1713 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
1715 ici
->param_index
= jfunc
->value
.ancestor
.formal_id
;
1716 ici
->anc_offset
+= jfunc
->value
.ancestor
.offset
;
1719 /* Either we can find a destination for this edge now or never. */
1720 ici
->param_index
= -1;
1722 if (!flag_indirect_inlining
)
1725 if (ici
->polymorphic
)
1726 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
);
1728 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
);
1730 if (new_direct_edge
)
1732 new_direct_edge
->indirect_inlining_edge
= 1;
1735 VEC_safe_push (cgraph_edge_p
, heap
, *new_edges
,
1737 top
= IPA_EDGE_REF (cs
);
1746 /* Recursively traverse subtree of NODE (including node) made of inlined
1747 cgraph_edges when CS has been inlined and invoke
1748 update_indirect_edges_after_inlining on all nodes and
1749 update_jump_functions_after_inlining on all non-inlined edges that lead out
1750 of this subtree. Newly discovered indirect edges will be added to
1751 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1755 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
1756 struct cgraph_node
*node
,
1757 VEC (cgraph_edge_p
, heap
) **new_edges
)
1759 struct cgraph_edge
*e
;
1762 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
1764 for (e
= node
->callees
; e
; e
= e
->next_callee
)
1765 if (!e
->inline_failed
)
1766 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
1768 update_jump_functions_after_inlining (cs
, e
);
1769 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
1770 update_jump_functions_after_inlining (cs
, e
);
1775 /* Update jump functions and call note functions on inlining the call site CS.
1776 CS is expected to lead to a node already cloned by
1777 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1778 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
1782 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
1783 VEC (cgraph_edge_p
, heap
) **new_edges
)
1786 /* Do nothing if the preparation phase has not been carried out yet
1787 (i.e. during early inlining). */
1788 if (!ipa_node_params_vector
)
1790 gcc_assert (ipa_edge_args_vector
);
1792 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
1794 /* We do not keep jump functions of inlined edges up to date. Better to free
1795 them so we do not access them accidentally. */
1796 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
1800 /* Frees all dynamically allocated structures that the argument info points
1804 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
1806 if (args
->jump_functions
)
1807 ggc_free (args
->jump_functions
);
1809 memset (args
, 0, sizeof (*args
));
1812 /* Free all ipa_edge structures. */
1815 ipa_free_all_edge_args (void)
1818 struct ipa_edge_args
*args
;
1820 FOR_EACH_VEC_ELT (ipa_edge_args_t
, ipa_edge_args_vector
, i
, args
)
1821 ipa_free_edge_args_substructures (args
);
1823 VEC_free (ipa_edge_args_t
, gc
, ipa_edge_args_vector
);
1824 ipa_edge_args_vector
= NULL
;
1827 /* Frees all dynamically allocated structures that the param info points
1831 ipa_free_node_params_substructures (struct ipa_node_params
*info
)
1833 VEC_free (ipa_param_descriptor_t
, heap
, info
->descriptors
);
1834 free (info
->lattices
);
1835 /* Lattice values and their sources are deallocated with their alocation
1837 VEC_free (tree
, heap
, info
->known_vals
);
1838 memset (info
, 0, sizeof (*info
));
1841 /* Free all ipa_node_params structures. */
1844 ipa_free_all_node_params (void)
1847 struct ipa_node_params
*info
;
1849 FOR_EACH_VEC_ELT (ipa_node_params_t
, ipa_node_params_vector
, i
, info
)
1850 ipa_free_node_params_substructures (info
);
1852 VEC_free (ipa_node_params_t
, heap
, ipa_node_params_vector
);
1853 ipa_node_params_vector
= NULL
;
1856 /* Hook that is called by cgraph.c when an edge is removed. */
1859 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
1861 /* During IPA-CP updating we can be called on not-yet analyze clones. */
1862 if (VEC_length (ipa_edge_args_t
, ipa_edge_args_vector
)
1863 <= (unsigned)cs
->uid
)
1865 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
1868 /* Hook that is called by cgraph.c when a node is removed. */
1871 ipa_node_removal_hook (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
1873 /* During IPA-CP updating we can be called on not-yet analyze clones. */
1874 if (VEC_length (ipa_node_params_t
, ipa_node_params_vector
)
1875 <= (unsigned)node
->uid
)
1877 ipa_free_node_params_substructures (IPA_NODE_REF (node
));
1880 /* Hook that is called by cgraph.c when a node is duplicated. */
1883 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
1884 __attribute__((unused
)) void *data
)
1886 struct ipa_edge_args
*old_args
, *new_args
;
1888 ipa_check_create_edge_args ();
1890 old_args
= IPA_EDGE_REF (src
);
1891 new_args
= IPA_EDGE_REF (dst
);
1893 new_args
->jump_functions
= VEC_copy (ipa_jump_func_t
, gc
,
1894 old_args
->jump_functions
);
1897 /* Hook that is called by cgraph.c when a node is duplicated. */
1900 ipa_node_duplication_hook (struct cgraph_node
*src
, struct cgraph_node
*dst
,
1901 ATTRIBUTE_UNUSED
void *data
)
1903 struct ipa_node_params
*old_info
, *new_info
;
1905 ipa_check_create_node_params ();
1906 old_info
= IPA_NODE_REF (src
);
1907 new_info
= IPA_NODE_REF (dst
);
1909 new_info
->descriptors
= VEC_copy (ipa_param_descriptor_t
, heap
,
1910 old_info
->descriptors
);
1911 new_info
->lattices
= NULL
;
1912 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
1914 new_info
->uses_analysis_done
= old_info
->uses_analysis_done
;
1915 new_info
->node_enqueued
= old_info
->node_enqueued
;
1919 /* Analyze newly added function into callgraph. */
1922 ipa_add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
1924 ipa_analyze_node (node
);
1927 /* Register our cgraph hooks if they are not already there. */
1930 ipa_register_cgraph_hooks (void)
1932 if (!edge_removal_hook_holder
)
1933 edge_removal_hook_holder
=
1934 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook
, NULL
);
1935 if (!node_removal_hook_holder
)
1936 node_removal_hook_holder
=
1937 cgraph_add_node_removal_hook (&ipa_node_removal_hook
, NULL
);
1938 if (!edge_duplication_hook_holder
)
1939 edge_duplication_hook_holder
=
1940 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook
, NULL
);
1941 if (!node_duplication_hook_holder
)
1942 node_duplication_hook_holder
=
1943 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook
, NULL
);
1944 function_insertion_hook_holder
=
1945 cgraph_add_function_insertion_hook (&ipa_add_new_function
, NULL
);
1948 /* Unregister our cgraph hooks if they are not already there. */
1951 ipa_unregister_cgraph_hooks (void)
1953 cgraph_remove_edge_removal_hook (edge_removal_hook_holder
);
1954 edge_removal_hook_holder
= NULL
;
1955 cgraph_remove_node_removal_hook (node_removal_hook_holder
);
1956 node_removal_hook_holder
= NULL
;
1957 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder
);
1958 edge_duplication_hook_holder
= NULL
;
1959 cgraph_remove_node_duplication_hook (node_duplication_hook_holder
);
1960 node_duplication_hook_holder
= NULL
;
1961 cgraph_remove_function_insertion_hook (function_insertion_hook_holder
);
1962 function_insertion_hook_holder
= NULL
;
1965 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1966 longer needed after ipa-cp. */
1969 ipa_free_all_structures_after_ipa_cp (void)
1973 ipa_free_all_edge_args ();
1974 ipa_free_all_node_params ();
1975 free_alloc_pool (ipcp_sources_pool
);
1976 free_alloc_pool (ipcp_values_pool
);
1977 ipa_unregister_cgraph_hooks ();
1981 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1982 longer needed after indirect inlining. */
1985 ipa_free_all_structures_after_iinln (void)
1987 ipa_free_all_edge_args ();
1988 ipa_free_all_node_params ();
1989 ipa_unregister_cgraph_hooks ();
1990 if (ipcp_sources_pool
)
1991 free_alloc_pool (ipcp_sources_pool
);
1992 if (ipcp_values_pool
)
1993 free_alloc_pool (ipcp_values_pool
);
1996 /* Print ipa_tree_map data structures of all functions in the
2000 ipa_print_node_params (FILE * f
, struct cgraph_node
*node
)
2004 struct ipa_node_params
*info
;
2006 if (!node
->analyzed
)
2008 info
= IPA_NODE_REF (node
);
2009 fprintf (f
, " function %s parameter descriptors:\n",
2010 cgraph_node_name (node
));
2011 count
= ipa_get_param_count (info
);
2012 for (i
= 0; i
< count
; i
++)
2014 temp
= ipa_get_param (info
, i
);
2015 if (TREE_CODE (temp
) == PARM_DECL
)
2016 fprintf (f
, " param %d : %s", i
,
2018 ? (*lang_hooks
.decl_printable_name
) (temp
, 2)
2020 if (ipa_is_param_used (info
, i
))
2021 fprintf (f
, " used");
2026 /* Print ipa_tree_map data structures of all functions in the
2030 ipa_print_all_params (FILE * f
)
2032 struct cgraph_node
*node
;
2034 fprintf (f
, "\nFunction parameters:\n");
2035 for (node
= cgraph_nodes
; node
; node
= node
->next
)
2036 ipa_print_node_params (f
, node
);
2039 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2042 ipa_get_vector_of_formal_parms (tree fndecl
)
2044 VEC(tree
, heap
) *args
;
2048 count
= count_formal_params (fndecl
);
2049 args
= VEC_alloc (tree
, heap
, count
);
2050 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
2051 VEC_quick_push (tree
, args
, parm
);
2056 /* Return a heap allocated vector containing types of formal parameters of
2057 function type FNTYPE. */
2059 static inline VEC(tree
, heap
) *
2060 get_vector_of_formal_parm_types (tree fntype
)
2062 VEC(tree
, heap
) *types
;
2066 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
2069 types
= VEC_alloc (tree
, heap
, count
);
2070 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
2071 VEC_quick_push (tree
, types
, TREE_VALUE (t
));
2076 /* Modify the function declaration FNDECL and its type according to the plan in
2077 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2078 to reflect the actual parameters being modified which are determined by the
2079 base_index field. */
2082 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
,
2083 const char *synth_parm_prefix
)
2085 VEC(tree
, heap
) *oparms
, *otypes
;
2086 tree orig_type
, new_type
= NULL
;
2087 tree old_arg_types
, t
, new_arg_types
= NULL
;
2088 tree parm
, *link
= &DECL_ARGUMENTS (fndecl
);
2089 int i
, len
= VEC_length (ipa_parm_adjustment_t
, adjustments
);
2090 tree new_reversed
= NULL
;
2091 bool care_for_types
, last_parm_void
;
2093 if (!synth_parm_prefix
)
2094 synth_parm_prefix
= "SYNTH";
2096 oparms
= ipa_get_vector_of_formal_parms (fndecl
);
2097 orig_type
= TREE_TYPE (fndecl
);
2098 old_arg_types
= TYPE_ARG_TYPES (orig_type
);
2100 /* The following test is an ugly hack, some functions simply don't have any
2101 arguments in their type. This is probably a bug but well... */
2102 care_for_types
= (old_arg_types
!= NULL_TREE
);
2105 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
2107 otypes
= get_vector_of_formal_parm_types (orig_type
);
2109 gcc_assert (VEC_length (tree
, oparms
) + 1 == VEC_length (tree
, otypes
));
2111 gcc_assert (VEC_length (tree
, oparms
) == VEC_length (tree
, otypes
));
2115 last_parm_void
= false;
2119 for (i
= 0; i
< len
; i
++)
2121 struct ipa_parm_adjustment
*adj
;
2124 adj
= VEC_index (ipa_parm_adjustment_t
, adjustments
, i
);
2125 parm
= VEC_index (tree
, oparms
, adj
->base_index
);
2128 if (adj
->copy_param
)
2131 new_arg_types
= tree_cons (NULL_TREE
, VEC_index (tree
, otypes
,
2135 link
= &DECL_CHAIN (parm
);
2137 else if (!adj
->remove_param
)
2143 ptype
= build_pointer_type (adj
->type
);
2148 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
2150 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
2152 DECL_NAME (new_parm
) = create_tmp_var_name (synth_parm_prefix
);
2154 DECL_ARTIFICIAL (new_parm
) = 1;
2155 DECL_ARG_TYPE (new_parm
) = ptype
;
2156 DECL_CONTEXT (new_parm
) = fndecl
;
2157 TREE_USED (new_parm
) = 1;
2158 DECL_IGNORED_P (new_parm
) = 1;
2159 layout_decl (new_parm
, 0);
2161 add_referenced_var (new_parm
);
2162 mark_sym_for_renaming (new_parm
);
2164 adj
->reduction
= new_parm
;
2168 link
= &DECL_CHAIN (new_parm
);
2176 new_reversed
= nreverse (new_arg_types
);
2180 TREE_CHAIN (new_arg_types
) = void_list_node
;
2182 new_reversed
= void_list_node
;
2186 /* Use copy_node to preserve as much as possible from original type
2187 (debug info, attribute lists etc.)
2188 Exception is METHOD_TYPEs must have THIS argument.
2189 When we are asked to remove it, we need to build new FUNCTION_TYPE
2191 if (TREE_CODE (orig_type
) != METHOD_TYPE
2192 || (VEC_index (ipa_parm_adjustment_t
, adjustments
, 0)->copy_param
2193 && VEC_index (ipa_parm_adjustment_t
, adjustments
, 0)->base_index
== 0))
2195 new_type
= build_distinct_type_copy (orig_type
);
2196 TYPE_ARG_TYPES (new_type
) = new_reversed
;
2201 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
2203 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
2204 DECL_VINDEX (fndecl
) = NULL_TREE
;
2207 /* When signature changes, we need to clear builtin info. */
2208 if (DECL_BUILT_IN (fndecl
))
2210 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
2211 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
2214 /* This is a new type, not a copy of an old type. Need to reassociate
2215 variants. We can handle everything except the main variant lazily. */
2216 t
= TYPE_MAIN_VARIANT (orig_type
);
2219 TYPE_MAIN_VARIANT (new_type
) = t
;
2220 TYPE_NEXT_VARIANT (new_type
) = TYPE_NEXT_VARIANT (t
);
2221 TYPE_NEXT_VARIANT (t
) = new_type
;
2225 TYPE_MAIN_VARIANT (new_type
) = new_type
;
2226 TYPE_NEXT_VARIANT (new_type
) = NULL
;
2229 TREE_TYPE (fndecl
) = new_type
;
2230 DECL_VIRTUAL_P (fndecl
) = 0;
2232 VEC_free (tree
, heap
, otypes
);
2233 VEC_free (tree
, heap
, oparms
);
2236 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2237 If this is a directly recursive call, CS must be NULL. Otherwise it must
2238 contain the corresponding call graph edge. */
2241 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
2242 ipa_parm_adjustment_vec adjustments
)
2244 VEC(tree
, heap
) *vargs
;
2245 VEC(tree
, gc
) **debug_args
= NULL
;
2247 gimple_stmt_iterator gsi
;
2251 len
= VEC_length (ipa_parm_adjustment_t
, adjustments
);
2252 vargs
= VEC_alloc (tree
, heap
, len
);
2253 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->decl
;
2255 gsi
= gsi_for_stmt (stmt
);
2256 for (i
= 0; i
< len
; i
++)
2258 struct ipa_parm_adjustment
*adj
;
2260 adj
= VEC_index (ipa_parm_adjustment_t
, adjustments
, i
);
2262 if (adj
->copy_param
)
2264 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
2266 VEC_quick_push (tree
, vargs
, arg
);
2268 else if (!adj
->remove_param
)
2270 tree expr
, base
, off
;
2273 /* We create a new parameter out of the value of the old one, we can
2274 do the following kind of transformations:
2276 - A scalar passed by reference is converted to a scalar passed by
2277 value. (adj->by_ref is false and the type of the original
2278 actual argument is a pointer to a scalar).
2280 - A part of an aggregate is passed instead of the whole aggregate.
2281 The part can be passed either by value or by reference, this is
2282 determined by value of adj->by_ref. Moreover, the code below
2283 handles both situations when the original aggregate is passed by
2284 value (its type is not a pointer) and when it is passed by
2285 reference (it is a pointer to an aggregate).
2287 When the new argument is passed by reference (adj->by_ref is true)
2288 it must be a part of an aggregate and therefore we form it by
2289 simply taking the address of a reference inside the original
2292 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
2293 base
= gimple_call_arg (stmt
, adj
->base_index
);
2294 loc
= EXPR_LOCATION (base
);
2296 if (TREE_CODE (base
) != ADDR_EXPR
2297 && POINTER_TYPE_P (TREE_TYPE (base
)))
2298 off
= build_int_cst (adj
->alias_ptr_type
,
2299 adj
->offset
/ BITS_PER_UNIT
);
2302 HOST_WIDE_INT base_offset
;
2305 if (TREE_CODE (base
) == ADDR_EXPR
)
2306 base
= TREE_OPERAND (base
, 0);
2308 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
2309 /* Aggregate arguments can have non-invariant addresses. */
2312 base
= build_fold_addr_expr (prev_base
);
2313 off
= build_int_cst (adj
->alias_ptr_type
,
2314 adj
->offset
/ BITS_PER_UNIT
);
2316 else if (TREE_CODE (base
) == MEM_REF
)
2318 off
= build_int_cst (adj
->alias_ptr_type
,
2320 + adj
->offset
/ BITS_PER_UNIT
);
2321 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
2323 base
= TREE_OPERAND (base
, 0);
2327 off
= build_int_cst (adj
->alias_ptr_type
,
2329 + adj
->offset
/ BITS_PER_UNIT
);
2330 base
= build_fold_addr_expr (base
);
2334 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
2336 expr
= build_fold_addr_expr (expr
);
2338 expr
= force_gimple_operand_gsi (&gsi
, expr
,
2340 || is_gimple_reg_type (adj
->type
),
2341 NULL
, true, GSI_SAME_STMT
);
2342 VEC_quick_push (tree
, vargs
, expr
);
2344 if (!adj
->copy_param
&& MAY_HAVE_DEBUG_STMTS
)
2347 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
2350 arg
= gimple_call_arg (stmt
, adj
->base_index
);
2351 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
2353 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
2355 arg
= fold_convert_loc (gimple_location (stmt
),
2356 TREE_TYPE (origin
), arg
);
2358 if (debug_args
== NULL
)
2359 debug_args
= decl_debug_args_insert (callee_decl
);
2360 for (ix
= 0; VEC_iterate (tree
, *debug_args
, ix
, ddecl
); ix
+= 2)
2361 if (ddecl
== origin
)
2363 ddecl
= VEC_index (tree
, *debug_args
, ix
+ 1);
2368 ddecl
= make_node (DEBUG_EXPR_DECL
);
2369 DECL_ARTIFICIAL (ddecl
) = 1;
2370 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
2371 DECL_MODE (ddecl
) = DECL_MODE (origin
);
2373 VEC_safe_push (tree
, gc
, *debug_args
, origin
);
2374 VEC_safe_push (tree
, gc
, *debug_args
, ddecl
);
2376 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
),
2378 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
2382 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2384 fprintf (dump_file
, "replacing stmt:");
2385 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
2388 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
2389 VEC_free (tree
, heap
, vargs
);
2390 if (gimple_call_lhs (stmt
))
2391 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
2393 gimple_set_block (new_stmt
, gimple_block (stmt
));
2394 if (gimple_has_location (stmt
))
2395 gimple_set_location (new_stmt
, gimple_location (stmt
));
2396 gimple_call_copy_flags (new_stmt
, stmt
);
2397 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
2399 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2401 fprintf (dump_file
, "with stmt:");
2402 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
2403 fprintf (dump_file
, "\n");
2405 gsi_replace (&gsi
, new_stmt
, true);
2407 cgraph_set_call_stmt (cs
, new_stmt
);
2408 update_ssa (TODO_update_ssa
);
2409 free_dominance_info (CDI_DOMINATORS
);
2412 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2415 index_in_adjustments_multiple_times_p (int base_index
,
2416 ipa_parm_adjustment_vec adjustments
)
2418 int i
, len
= VEC_length (ipa_parm_adjustment_t
, adjustments
);
2421 for (i
= 0; i
< len
; i
++)
2423 struct ipa_parm_adjustment
*adj
;
2424 adj
= VEC_index (ipa_parm_adjustment_t
, adjustments
, i
);
2426 if (adj
->base_index
== base_index
)
2438 /* Return adjustments that should have the same effect on function parameters
2439 and call arguments as if they were first changed according to adjustments in
2440 INNER and then by adjustments in OUTER. */
2442 ipa_parm_adjustment_vec
2443 ipa_combine_adjustments (ipa_parm_adjustment_vec inner
,
2444 ipa_parm_adjustment_vec outer
)
2446 int i
, outlen
= VEC_length (ipa_parm_adjustment_t
, outer
);
2447 int inlen
= VEC_length (ipa_parm_adjustment_t
, inner
);
2449 ipa_parm_adjustment_vec adjustments
, tmp
;
2451 tmp
= VEC_alloc (ipa_parm_adjustment_t
, heap
, inlen
);
2452 for (i
= 0; i
< inlen
; i
++)
2454 struct ipa_parm_adjustment
*n
;
2455 n
= VEC_index (ipa_parm_adjustment_t
, inner
, i
);
2457 if (n
->remove_param
)
2460 VEC_quick_push (ipa_parm_adjustment_t
, tmp
, n
);
2463 adjustments
= VEC_alloc (ipa_parm_adjustment_t
, heap
, outlen
+ removals
);
2464 for (i
= 0; i
< outlen
; i
++)
2466 struct ipa_parm_adjustment
*r
;
2467 struct ipa_parm_adjustment
*out
= VEC_index (ipa_parm_adjustment_t
,
2469 struct ipa_parm_adjustment
*in
= VEC_index (ipa_parm_adjustment_t
, tmp
,
2472 gcc_assert (!in
->remove_param
);
2473 if (out
->remove_param
)
2475 if (!index_in_adjustments_multiple_times_p (in
->base_index
, tmp
))
2477 r
= VEC_quick_push (ipa_parm_adjustment_t
, adjustments
, NULL
);
2478 memset (r
, 0, sizeof (*r
));
2479 r
->remove_param
= true;
2484 r
= VEC_quick_push (ipa_parm_adjustment_t
, adjustments
, NULL
);
2485 memset (r
, 0, sizeof (*r
));
2486 r
->base_index
= in
->base_index
;
2487 r
->type
= out
->type
;
2489 /* FIXME: Create nonlocal value too. */
2491 if (in
->copy_param
&& out
->copy_param
)
2492 r
->copy_param
= true;
2493 else if (in
->copy_param
)
2494 r
->offset
= out
->offset
;
2495 else if (out
->copy_param
)
2496 r
->offset
= in
->offset
;
2498 r
->offset
= in
->offset
+ out
->offset
;
2501 for (i
= 0; i
< inlen
; i
++)
2503 struct ipa_parm_adjustment
*n
= VEC_index (ipa_parm_adjustment_t
,
2506 if (n
->remove_param
)
2507 VEC_quick_push (ipa_parm_adjustment_t
, adjustments
, n
);
2510 VEC_free (ipa_parm_adjustment_t
, heap
, tmp
);
2514 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2515 friendly way, assuming they are meant to be applied to FNDECL. */
2518 ipa_dump_param_adjustments (FILE *file
, ipa_parm_adjustment_vec adjustments
,
2521 int i
, len
= VEC_length (ipa_parm_adjustment_t
, adjustments
);
2523 VEC(tree
, heap
) *parms
= ipa_get_vector_of_formal_parms (fndecl
);
2525 fprintf (file
, "IPA param adjustments: ");
2526 for (i
= 0; i
< len
; i
++)
2528 struct ipa_parm_adjustment
*adj
;
2529 adj
= VEC_index (ipa_parm_adjustment_t
, adjustments
, i
);
2532 fprintf (file
, " ");
2536 fprintf (file
, "%i. base_index: %i - ", i
, adj
->base_index
);
2537 print_generic_expr (file
, VEC_index (tree
, parms
, adj
->base_index
), 0);
2540 fprintf (file
, ", base: ");
2541 print_generic_expr (file
, adj
->base
, 0);
2545 fprintf (file
, ", reduction: ");
2546 print_generic_expr (file
, adj
->reduction
, 0);
2548 if (adj
->new_ssa_base
)
2550 fprintf (file
, ", new_ssa_base: ");
2551 print_generic_expr (file
, adj
->new_ssa_base
, 0);
2554 if (adj
->copy_param
)
2555 fprintf (file
, ", copy_param");
2556 else if (adj
->remove_param
)
2557 fprintf (file
, ", remove_param");
2559 fprintf (file
, ", offset %li", (long) adj
->offset
);
2561 fprintf (file
, ", by_ref");
2562 print_node_brief (file
, ", type: ", adj
->type
, 0);
2563 fprintf (file
, "\n");
2565 VEC_free (tree
, heap
, parms
);
2568 /* Stream out jump function JUMP_FUNC to OB. */
2571 ipa_write_jump_function (struct output_block
*ob
,
2572 struct ipa_jump_func
*jump_func
)
2574 streamer_write_uhwi (ob
, jump_func
->type
);
2576 switch (jump_func
->type
)
2578 case IPA_JF_UNKNOWN
:
2580 case IPA_JF_KNOWN_TYPE
:
2581 stream_write_tree (ob
, jump_func
->value
.base_binfo
, true);
2584 stream_write_tree (ob
, jump_func
->value
.constant
, true);
2586 case IPA_JF_PASS_THROUGH
:
2587 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
2588 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
2589 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
2591 case IPA_JF_ANCESTOR
:
2592 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
2593 stream_write_tree (ob
, jump_func
->value
.ancestor
.type
, true);
2594 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
2596 case IPA_JF_CONST_MEMBER_PTR
:
2597 stream_write_tree (ob
, jump_func
->value
.member_cst
.pfn
, true);
2598 stream_write_tree (ob
, jump_func
->value
.member_cst
.delta
, false);
2603 /* Read in jump function JUMP_FUNC from IB. */
2606 ipa_read_jump_function (struct lto_input_block
*ib
,
2607 struct ipa_jump_func
*jump_func
,
2608 struct data_in
*data_in
)
2610 jump_func
->type
= (enum jump_func_type
) streamer_read_uhwi (ib
);
2612 switch (jump_func
->type
)
2614 case IPA_JF_UNKNOWN
:
2616 case IPA_JF_KNOWN_TYPE
:
2617 jump_func
->value
.base_binfo
= stream_read_tree (ib
, data_in
);
2620 jump_func
->value
.constant
= stream_read_tree (ib
, data_in
);
2622 case IPA_JF_PASS_THROUGH
:
2623 jump_func
->value
.pass_through
.operand
= stream_read_tree (ib
, data_in
);
2624 jump_func
->value
.pass_through
.formal_id
= streamer_read_uhwi (ib
);
2625 jump_func
->value
.pass_through
.operation
2626 = (enum tree_code
) streamer_read_uhwi (ib
);
2628 case IPA_JF_ANCESTOR
:
2629 jump_func
->value
.ancestor
.offset
= streamer_read_uhwi (ib
);
2630 jump_func
->value
.ancestor
.type
= stream_read_tree (ib
, data_in
);
2631 jump_func
->value
.ancestor
.formal_id
= streamer_read_uhwi (ib
);
2633 case IPA_JF_CONST_MEMBER_PTR
:
2634 jump_func
->value
.member_cst
.pfn
= stream_read_tree (ib
, data_in
);
2635 jump_func
->value
.member_cst
.delta
= stream_read_tree (ib
, data_in
);
2640 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2641 relevant to indirect inlining to OB. */
2644 ipa_write_indirect_edge_info (struct output_block
*ob
,
2645 struct cgraph_edge
*cs
)
2647 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2648 struct bitpack_d bp
;
2650 streamer_write_hwi (ob
, ii
->param_index
);
2651 streamer_write_hwi (ob
, ii
->anc_offset
);
2652 bp
= bitpack_create (ob
->main_stream
);
2653 bp_pack_value (&bp
, ii
->polymorphic
, 1);
2654 streamer_write_bitpack (&bp
);
2656 if (ii
->polymorphic
)
2658 streamer_write_hwi (ob
, ii
->otr_token
);
2659 stream_write_tree (ob
, ii
->otr_type
, true);
2663 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2664 relevant to indirect inlining from IB. */
2667 ipa_read_indirect_edge_info (struct lto_input_block
*ib
,
2668 struct data_in
*data_in ATTRIBUTE_UNUSED
,
2669 struct cgraph_edge
*cs
)
2671 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2672 struct bitpack_d bp
;
2674 ii
->param_index
= (int) streamer_read_hwi (ib
);
2675 ii
->anc_offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
2676 bp
= streamer_read_bitpack (ib
);
2677 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
2678 if (ii
->polymorphic
)
2680 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
2681 ii
->otr_type
= stream_read_tree (ib
, data_in
);
2685 /* Stream out NODE info to OB. */
2688 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
2691 lto_cgraph_encoder_t encoder
;
2692 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
2694 struct cgraph_edge
*e
;
2695 struct bitpack_d bp
;
2697 encoder
= ob
->decl_state
->cgraph_node_encoder
;
2698 node_ref
= lto_cgraph_encoder_encode (encoder
, node
);
2699 streamer_write_uhwi (ob
, node_ref
);
2701 bp
= bitpack_create (ob
->main_stream
);
2702 gcc_assert (info
->uses_analysis_done
2703 || ipa_get_param_count (info
) == 0);
2704 gcc_assert (!info
->node_enqueued
);
2705 gcc_assert (!info
->ipcp_orig_node
);
2706 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
2707 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
2708 streamer_write_bitpack (&bp
);
2709 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2711 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2713 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
2714 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
2715 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
2717 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2719 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2721 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
2722 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
2723 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
2724 ipa_write_indirect_edge_info (ob
, e
);
2728 /* Stream in NODE info from IB. */
2731 ipa_read_node_info (struct lto_input_block
*ib
, struct cgraph_node
*node
,
2732 struct data_in
*data_in
)
2734 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
2736 struct cgraph_edge
*e
;
2737 struct bitpack_d bp
;
2739 ipa_initialize_node_params (node
);
2741 bp
= streamer_read_bitpack (ib
);
2742 if (ipa_get_param_count (info
) != 0)
2743 info
->uses_analysis_done
= true;
2744 info
->node_enqueued
= false;
2745 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
2746 ipa_set_param_used (info
, k
, bp_unpack_value (&bp
, 1));
2747 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2749 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2750 int count
= streamer_read_uhwi (ib
);
2754 VEC_safe_grow_cleared (ipa_jump_func_t
, gc
, args
->jump_functions
, count
);
2756 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
2757 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), data_in
);
2759 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2761 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2762 int count
= streamer_read_uhwi (ib
);
2766 VEC_safe_grow_cleared (ipa_jump_func_t
, gc
, args
->jump_functions
,
2768 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
2769 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
),
2772 ipa_read_indirect_edge_info (ib
, data_in
, e
);
2776 /* Write jump functions for nodes in SET. */
2779 ipa_prop_write_jump_functions (cgraph_node_set set
)
2781 struct cgraph_node
*node
;
2782 struct output_block
*ob
;
2783 unsigned int count
= 0;
2784 cgraph_node_set_iterator csi
;
2786 if (!ipa_node_params_vector
)
2789 ob
= create_output_block (LTO_section_jump_functions
);
2790 ob
->cgraph_node
= NULL
;
2791 for (csi
= csi_start (set
); !csi_end_p (csi
); csi_next (&csi
))
2793 node
= csi_node (csi
);
2794 if (cgraph_function_with_gimple_body_p (node
)
2795 && IPA_NODE_REF (node
) != NULL
)
2799 streamer_write_uhwi (ob
, count
);
2801 /* Process all of the functions. */
2802 for (csi
= csi_start (set
); !csi_end_p (csi
); csi_next (&csi
))
2804 node
= csi_node (csi
);
2805 if (cgraph_function_with_gimple_body_p (node
)
2806 && IPA_NODE_REF (node
) != NULL
)
2807 ipa_write_node_info (ob
, node
);
2809 streamer_write_char_stream (ob
->main_stream
, 0);
2810 produce_asm (ob
, NULL
);
2811 destroy_output_block (ob
);
2814 /* Read section in file FILE_DATA of length LEN with data DATA. */
2817 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
2820 const struct lto_function_header
*header
=
2821 (const struct lto_function_header
*) data
;
2822 const int32_t cfg_offset
= sizeof (struct lto_function_header
);
2823 const int32_t main_offset
= cfg_offset
+ header
->cfg_size
;
2824 const int32_t string_offset
= main_offset
+ header
->main_size
;
2825 struct data_in
*data_in
;
2826 struct lto_input_block ib_main
;
2830 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
2834 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
2835 header
->string_size
, NULL
);
2836 count
= streamer_read_uhwi (&ib_main
);
2838 for (i
= 0; i
< count
; i
++)
2841 struct cgraph_node
*node
;
2842 lto_cgraph_encoder_t encoder
;
2844 index
= streamer_read_uhwi (&ib_main
);
2845 encoder
= file_data
->cgraph_node_encoder
;
2846 node
= lto_cgraph_encoder_deref (encoder
, index
);
2847 gcc_assert (node
->analyzed
);
2848 ipa_read_node_info (&ib_main
, node
, data_in
);
2850 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
2852 lto_data_in_delete (data_in
);
2855 /* Read ipcp jump functions. */
2858 ipa_prop_read_jump_functions (void)
2860 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
2861 struct lto_file_decl_data
*file_data
;
2864 ipa_check_create_node_params ();
2865 ipa_check_create_edge_args ();
2866 ipa_register_cgraph_hooks ();
2868 while ((file_data
= file_data_vec
[j
++]))
2871 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
2874 ipa_prop_read_section (file_data
, data
, len
);
2878 /* After merging units, we can get mismatch in argument counts.
2879 Also decl merging might've rendered parameter lists obsolete.
2880 Also compute called_with_variable_arg info. */
2883 ipa_update_after_lto_read (void)
2885 struct cgraph_node
*node
;
2887 ipa_check_create_node_params ();
2888 ipa_check_create_edge_args ();
2890 for (node
= cgraph_nodes
; node
; node
= node
->next
)
2892 ipa_initialize_node_params (node
);