1 /* Interprocedural analyses.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
26 #include "double-int.h"
34 #include "fold-const.h"
37 #include "hard-reg-set.h"
39 #include "dominance.h"
41 #include "basic-block.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-fold.h"
46 #include "gimple-expr.h"
52 #include "statistics.h"
54 #include "fixed-value.h"
55 #include "insn-config.h"
64 #include "stor-layout.h"
65 #include "print-tree.h"
67 #include "gimple-iterator.h"
68 #include "gimplify-me.h"
69 #include "gimple-walk.h"
70 #include "langhooks.h"
73 #include "plugin-api.h"
76 #include "alloc-pool.h"
77 #include "symbol-summary.h"
80 #include "gimple-ssa.h"
82 #include "tree-phinodes.h"
83 #include "ssa-iterators.h"
84 #include "tree-into-ssa.h"
86 #include "tree-pass.h"
87 #include "tree-inline.h"
88 #include "ipa-inline.h"
89 #include "diagnostic.h"
90 #include "gimple-pretty-print.h"
91 #include "lto-streamer.h"
92 #include "data-streamer.h"
93 #include "tree-streamer.h"
95 #include "ipa-utils.h"
96 #include "stringpool.h"
97 #include "tree-ssanames.h"
100 #include "builtins.h"
102 /* Intermediate information that we get from alias analysis about a particular
103 parameter in a particular basic_block. When a parameter or the memory it
104 references is marked modified, we use that information in all dominatd
105 blocks without cosulting alias analysis oracle. */
107 struct param_aa_status
/* NOTE(review): this extraction dropped lines (original numbering jumps
   105->107->109); the struct's opening brace and the member described by the
   comment below are not visible.  Code later in this file reads a
   `valid' member of this struct (see find_dominating_aa_status), which is
   presumably what the next comment documents -- confirm against the
   original file.  */
109 /* Set when this structure contains meaningful information. If not, the
110 structure describing a dominating BB should be used instead. */
113 /* Whether we have seen something which might have modified the data in
114 question. PARM is for the parameter itself, REF is for data it points to
115 but using the alias type of individual accesses and PT is the same thing
116 but for computing aggregate pass-through functions using a very inclusive
118 bool parm_modified
, ref_modified
, pt_modified
;
121 /* Information related to a given BB that used only when looking at function
/* NOTE(review): the tail of the comment above and the `struct ipa_bb_info'
   opening (original lines 122-125) are missing from this extraction; the
   type name is known from ipa_get_bb_info below, which returns
   `struct ipa_bb_info *'.  */
126 /* Call graph edges going out of this BB. */
127 vec
<cgraph_edge
*> cg_edges
;
128 /* Alias analysis statuses of each formal parameter at this bb. */
129 vec
<param_aa_status
> param_aa_statuses
;
132 /* Structure with global information that is only used when looking at function
135 struct func_body_info
/* NOTE(review): members are missing here.  The comment "The node that is
   being analyzed" (line 137) is followed directly by the `info' member
   (line 141), so the member it documents (presumably a cgraph_node pointer)
   was dropped.  Likewise the `param_count' member documented by "Number of
   parameters" is missing although parm_bb_aa_status_for_bb reads
   fbi->param_count.  Confirm against the original file.  */
137 /* The node that is being analyzed. */
141 struct ipa_node_params
*info
;
143 /* Information about individual BBs. */
144 vec
<ipa_bb_info
> bb_infos
;
146 /* Number of parameters. */
149 /* Number of statements already walked by when analyzing this function. */
150 unsigned int aa_walked
;
153 /* Function summary where the parameter infos are actually stored. */
154 ipa_node_params_t
*ipa_node_params_sum
= NULL
;
155 /* Vector of IPA-CP transformation data for each clone. */
156 vec
<ipcp_transformation_summary
, va_gc
> *ipcp_transformations
;
157 /* Vector where the parameter infos are actually stored. */
/* NOTE(review): despite the comment above, this vector holds per-edge
   argument information (ipa_edge_args), not parameter infos -- the
   parameter infos live in ipa_node_params_sum above.  */
158 vec
<ipa_edge_args
, va_gc
> *ipa_edge_args_vector
;
160 /* Holders of ipa cgraph hooks: */
/* Handle returned when registering the edge-removal callback; used to
   unregister it again.  */
161 static struct cgraph_edge_hook_list
*edge_removal_hook_holder
;
/* Handle for the edge-duplication (clone) callback.  */
162 static struct cgraph_2edge_hook_list
*edge_duplication_hook_holder
;
/* Handle for the new-function-insertion callback.  */
163 static struct cgraph_node_hook_list
*function_insertion_hook_holder
;
165 /* Description of a reference to an IPA constant. */
166 struct ipa_cst_ref_desc
/* NOTE(review): the struct's opening brace (original line 167) and the
   member that the last comment below documents (a reference count, judging
   by "Number of references in IPA structures") are missing from this
   extraction -- confirm against the original file.  */
168 /* Edge that corresponds to the statement which took the reference. */
169 struct cgraph_edge
*cs
;
170 /* Linked list of duplicates created when call graph edges are cloned. */
171 struct ipa_cst_ref_desc
*next_duplicate
;
172 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
173 if out of control. */
177 /* Allocation pool for reference descriptions. */
/* Object pool from which ipa_set_jf_constant allocates ipa_cst_ref_desc
   records; 32 is the pool's block size.  */
179 static pool_allocator
<ipa_cst_ref_desc
> ipa_refdesc_pool
180 ("IPA-PROP ref descriptions", 32);
182 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
183 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
186 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
188 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
192 return !opt_for_fn (node
->decl
, optimize
) || !opt_for_fn (node
->decl
, flag_ipa_cp
);
195 /* Return index of the formal whose tree is PTREE in function which corresponds
199 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
> descriptors
, tree ptree
)
203 count
= descriptors
.length ();
204 for (i
= 0; i
< count
; i
++)
205 if (descriptors
[i
].decl
== ptree
)
211 /* Return index of the formal whose tree is PTREE in function which corresponds
215 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
217 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
220 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
224 ipa_populate_param_decls (struct cgraph_node
*node
,
225 vec
<ipa_param_descriptor
> &descriptors
)
233 gcc_assert (gimple_has_body_p (fndecl
));
234 fnargs
= DECL_ARGUMENTS (fndecl
);
236 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
238 descriptors
[param_num
].decl
= parm
;
239 descriptors
[param_num
].move_cost
= estimate_move_cost (TREE_TYPE (parm
),
245 /* Return how many formal parameters FNDECL has. */
248 count_formal_params (tree fndecl
)
252 gcc_assert (gimple_has_body_p (fndecl
));
254 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
260 /* Return the declaration of Ith formal parameter of the function corresponding
261 to INFO. Note there is no setter function as this array is built just once
262 using ipa_initialize_node_params. */
265 ipa_dump_param (FILE *file
, struct ipa_node_params
*info
, int i
)
267 fprintf (file
, "param #%i", i
);
268 if (info
->descriptors
[i
].decl
)
271 print_generic_expr (file
, info
->descriptors
[i
].decl
, 0);
275 /* Initialize the ipa_node_params structure associated with NODE
276 to hold PARAM_COUNT parameters. */
279 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
281 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
283 if (!info
->descriptors
.exists () && param_count
)
284 info
->descriptors
.safe_grow_cleared (param_count
);
287 /* Initialize the ipa_node_params structure associated with NODE by counting
288 the function parameters, creating the descriptors and populating their
292 ipa_initialize_node_params (struct cgraph_node
*node
)
294 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
296 if (!info
->descriptors
.exists ())
298 ipa_alloc_node_params (node
, count_formal_params (node
->decl
));
299 ipa_populate_param_decls (node
, info
->descriptors
);
303 /* Print the jump functions associated with call graph edge CS to file F. */
306 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
310 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
311 for (i
= 0; i
< count
; i
++)
313 struct ipa_jump_func
*jump_func
;
314 enum jump_func_type type
;
316 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
317 type
= jump_func
->type
;
319 fprintf (f
, " param %d: ", i
);
320 if (type
== IPA_JF_UNKNOWN
)
321 fprintf (f
, "UNKNOWN\n");
322 else if (type
== IPA_JF_CONST
)
324 tree val
= jump_func
->value
.constant
.value
;
325 fprintf (f
, "CONST: ");
326 print_generic_expr (f
, val
, 0);
327 if (TREE_CODE (val
) == ADDR_EXPR
328 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
331 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
336 else if (type
== IPA_JF_PASS_THROUGH
)
338 fprintf (f
, "PASS THROUGH: ");
339 fprintf (f
, "%d, op %s",
340 jump_func
->value
.pass_through
.formal_id
,
341 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
342 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
345 print_generic_expr (f
,
346 jump_func
->value
.pass_through
.operand
, 0);
348 if (jump_func
->value
.pass_through
.agg_preserved
)
349 fprintf (f
, ", agg_preserved");
352 else if (type
== IPA_JF_ANCESTOR
)
354 fprintf (f
, "ANCESTOR: ");
355 fprintf (f
, "%d, offset " HOST_WIDE_INT_PRINT_DEC
,
356 jump_func
->value
.ancestor
.formal_id
,
357 jump_func
->value
.ancestor
.offset
);
358 if (jump_func
->value
.ancestor
.agg_preserved
)
359 fprintf (f
, ", agg_preserved");
363 if (jump_func
->agg
.items
)
365 struct ipa_agg_jf_item
*item
;
368 fprintf (f
, " Aggregate passed by %s:\n",
369 jump_func
->agg
.by_ref
? "reference" : "value");
370 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
372 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
374 if (TYPE_P (item
->value
))
375 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
376 tree_to_uhwi (TYPE_SIZE (item
->value
)));
379 fprintf (f
, "cst: ");
380 print_generic_expr (f
, item
->value
, 0);
386 struct ipa_polymorphic_call_context
*ctx
387 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs
), i
);
388 if (ctx
&& !ctx
->useless_p ())
390 fprintf (f
, " Context: ");
391 ctx
->dump (dump_file
);
394 if (jump_func
->alignment
.known
)
396 fprintf (f
, " Alignment: %u, misalignment: %u\n",
397 jump_func
->alignment
.align
,
398 jump_func
->alignment
.misalign
);
401 fprintf (f
, " Unknown alignment\n");
406 /* Print the jump functions of all arguments on all call graph edges going from
410 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
412 struct cgraph_edge
*cs
;
414 fprintf (f
, " Jump functions of caller %s/%i:\n", node
->name (),
416 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
418 if (!ipa_edge_args_info_available_for_edge_p (cs
))
421 fprintf (f
, " callsite %s/%i -> %s/%i : \n",
422 xstrdup_for_dump (node
->name ()), node
->order
,
423 xstrdup_for_dump (cs
->callee
->name ()),
425 ipa_print_node_jump_functions_for_edge (f
, cs
);
428 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
430 struct cgraph_indirect_call_info
*ii
;
431 if (!ipa_edge_args_info_available_for_edge_p (cs
))
434 ii
= cs
->indirect_info
;
435 if (ii
->agg_contents
)
436 fprintf (f
, " indirect %s callsite, calling param %i, "
437 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
438 ii
->member_ptr
? "member ptr" : "aggregate",
439 ii
->param_index
, ii
->offset
,
440 ii
->by_ref
? "by reference" : "by_value");
442 fprintf (f
, " indirect %s callsite, calling param %i, "
443 "offset " HOST_WIDE_INT_PRINT_DEC
,
444 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
,
449 fprintf (f
, ", for stmt ");
450 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
455 ii
->context
.dump (f
);
456 ipa_print_node_jump_functions_for_edge (f
, cs
);
460 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
463 ipa_print_all_jump_functions (FILE *f
)
465 struct cgraph_node
*node
;
467 fprintf (f
, "\nJump functions:\n");
468 FOR_EACH_FUNCTION (node
)
470 ipa_print_node_jump_functions (f
, node
);
474 /* Set jfunc to be a know-really nothing jump function. */
477 ipa_set_jf_unknown (struct ipa_jump_func
*jfunc
)
479 jfunc
->type
= IPA_JF_UNKNOWN
;
480 jfunc
->alignment
.known
= false;
483 /* Set JFUNC to be a copy of another jmp (to be used by jump function
484 combination code). The two functions will share their rdesc. */
487 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
488 struct ipa_jump_func
*src
)
491 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
492 dst
->type
= IPA_JF_CONST
;
493 dst
->value
.constant
= src
->value
.constant
;
496 /* Set JFUNC to be a constant jmp function. */
499 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
500 struct cgraph_edge
*cs
)
502 constant
= unshare_expr (constant
);
503 if (constant
&& EXPR_P (constant
))
504 SET_EXPR_LOCATION (constant
, UNKNOWN_LOCATION
);
505 jfunc
->type
= IPA_JF_CONST
;
506 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
508 if (TREE_CODE (constant
) == ADDR_EXPR
509 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
511 struct ipa_cst_ref_desc
*rdesc
;
513 rdesc
= ipa_refdesc_pool
.allocate ();
515 rdesc
->next_duplicate
= NULL
;
517 jfunc
->value
.constant
.rdesc
= rdesc
;
520 jfunc
->value
.constant
.rdesc
= NULL
;
523 /* Set JFUNC to be a simple pass-through jump function. */
525 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
528 jfunc
->type
= IPA_JF_PASS_THROUGH
;
529 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
530 jfunc
->value
.pass_through
.formal_id
= formal_id
;
531 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
532 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
535 /* Set JFUNC to be an arithmetic pass through jump function. */
538 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
539 tree operand
, enum tree_code operation
)
541 jfunc
->type
= IPA_JF_PASS_THROUGH
;
542 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
543 jfunc
->value
.pass_through
.formal_id
= formal_id
;
544 jfunc
->value
.pass_through
.operation
= operation
;
545 jfunc
->value
.pass_through
.agg_preserved
= false;
548 /* Set JFUNC to be an ancestor jump function. */
551 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
552 int formal_id
, bool agg_preserved
)
554 jfunc
->type
= IPA_JF_ANCESTOR
;
555 jfunc
->value
.ancestor
.formal_id
= formal_id
;
556 jfunc
->value
.ancestor
.offset
= offset
;
557 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
560 /* Get IPA BB information about the given BB. FBI is the context of analyzis
561 of this function body. */
563 static struct ipa_bb_info
*
564 ipa_get_bb_info (struct func_body_info
*fbi
, basic_block bb
)
566 gcc_checking_assert (fbi
);
567 return &fbi
->bb_infos
[bb
->index
];
570 /* Structure to be passed in between detect_type_change and
571 check_stmt_for_type_change. */
573 struct prop_type_change_info
/* NOTE(review): the opening brace and at least two members are missing from
   this extraction (original numbering jumps 573->575 and 578->581).
   detect_type_change_from_memory_writes assigns tci.object, so the member
   documented by "The declaration or SSA_NAME pointer of the base..." is
   presumably called `object' -- confirm against the original file.  */
575 /* Offset into the object where there is the virtual method pointer we are
577 HOST_WIDE_INT offset
;
578 /* The declaration or SSA_NAME pointer of the base that we are checking for
581 /* Set to true if dynamic type change has been detected. */
582 bool type_maybe_changed
;
585 /* Return true if STMT can modify a virtual method table pointer.
587 This function makes special assumptions about both constructors and
588 destructors which are all the functions that are allowed to alter the VMT
589 pointers. It assumes that destructors begin with assignment into all VMT
590 pointers and that constructors essentially look in the following way:
592 1) The very first thing they do is that they call constructors of ancestor
593 sub-objects that have them.
595 2) Then VMT pointers of this and all its ancestors is set to new values
596 corresponding to the type corresponding to the constructor.
598 3) Only afterwards, other stuff such as constructor of member sub-objects
599 and the code written by the user is run. Only this may include calling
600 virtual functions, directly or indirectly.
602 There is no way to call a constructor of an ancestor sub-object in any
605 This means that we do not have to care whether constructors get the correct
606 type information because they will always change it (in fact, if we define
607 the type to be given by the VMT pointer, it is undefined).
609 The most important fact to derive from the above is that if, for some
610 statement in the section 3, we try to detect whether the dynamic type has
611 changed, we can safely ignore all calls as we examine the function body
612 backwards until we reach statements in section 2 because these calls cannot
613 be ancestor constructors or destructors (if the input is not bogus) and so
614 do not change the dynamic type (this holds true only for automatically
615 allocated objects but at the moment we devirtualize only these). We then
616 must detect that statements in section 2 change the dynamic type and can try
617 to derive the new type. That is enough and we can stop, we will never see
618 the calls into constructors of sub-objects in this code. Therefore we can
619 safely ignore all call statements that we traverse.
623 stmt_may_be_vtbl_ptr_store (gimple stmt
)
625 if (is_gimple_call (stmt
))
627 if (gimple_clobber_p (stmt
))
629 else if (is_gimple_assign (stmt
))
631 tree lhs
= gimple_assign_lhs (stmt
);
633 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
635 if (flag_strict_aliasing
636 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
639 if (TREE_CODE (lhs
) == COMPONENT_REF
640 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
642 /* In the future we might want to use get_base_ref_and_offset to find
643 if there is a field corresponding to the offset and if so, proceed
644 almost like if it was a component ref. */
650 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
651 to check whether a particular statement may modify the virtual table
652 pointerIt stores its result into DATA, which points to a
653 prop_type_change_info structure. */
656 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
658 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
659 struct prop_type_change_info
*tci
= (struct prop_type_change_info
*) data
;
661 if (stmt_may_be_vtbl_ptr_store (stmt
))
663 tci
->type_maybe_changed
= true;
670 /* See if ARG is PARAM_DECl describing instance passed by pointer
671 or reference in FUNCTION. Return false if the dynamic type may change
672 in between beggining of the function until CALL is invoked.
674 Generally functions are not allowed to change type of such instances,
675 but they call destructors. We assume that methods can not destroy the THIS
676 pointer. Also as a special cases, constructor and destructors may change
677 type of the THIS pointer. */
680 param_type_may_change_p (tree function
, tree arg
, gimple call
)
682 /* Pure functions can not do any changes on the dynamic type;
683 that require writting to memory. */
684 if (flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
686 /* We need to check if we are within inlined consturctor
687 or destructor (ideally we would have way to check that the
688 inline cdtor is actually working on ARG, but we don't have
689 easy tie on this, so punt on all non-pure cdtors.
690 We may also record the types of cdtors and once we know type
691 of the instance match them.
693 Also code unification optimizations may merge calls from
694 different blocks making return values unreliable. So
695 do nothing during late optimization. */
696 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
698 if (TREE_CODE (arg
) == SSA_NAME
699 && SSA_NAME_IS_DEFAULT_DEF (arg
)
700 && TREE_CODE (SSA_NAME_VAR (arg
)) == PARM_DECL
)
702 /* Normal (non-THIS) argument. */
703 if ((SSA_NAME_VAR (arg
) != DECL_ARGUMENTS (function
)
704 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
)
705 /* THIS pointer of an method - here we we want to watch constructors
706 and destructors as those definitely may change the dynamic
708 || (TREE_CODE (TREE_TYPE (function
)) == METHOD_TYPE
709 && !DECL_CXX_CONSTRUCTOR_P (function
)
710 && !DECL_CXX_DESTRUCTOR_P (function
)
711 && (SSA_NAME_VAR (arg
) == DECL_ARGUMENTS (function
))))
713 /* Walk the inline stack and watch out for ctors/dtors. */
714 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
715 block
= BLOCK_SUPERCONTEXT (block
))
716 if (inlined_polymorphic_ctor_dtor_block_p (block
, false))
724 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
725 callsite CALL) by looking for assignments to its virtual table pointer. If
726 it is, return true and fill in the jump function JFUNC with relevant type
727 information or set it to unknown. ARG is the object itself (not a pointer
728 to it, unless dereferenced). BASE is the base of the memory access as
729 returned by get_ref_base_and_extent, as is the offset.
731 This is helper function for detect_type_change and detect_type_change_ssa
732 that does the heavy work which is usually unnecesary. */
735 detect_type_change_from_memory_writes (tree arg
, tree base
, tree comp_type
,
736 gcall
*call
, struct ipa_jump_func
*jfunc
,
737 HOST_WIDE_INT offset
)
739 struct prop_type_change_info tci
;
741 bool entry_reached
= false;
743 gcc_checking_assert (DECL_P (arg
)
744 || TREE_CODE (arg
) == MEM_REF
745 || handled_component_p (arg
));
747 comp_type
= TYPE_MAIN_VARIANT (comp_type
);
749 /* Const calls cannot call virtual methods through VMT and so type changes do
751 if (!flag_devirtualize
|| !gimple_vuse (call
)
752 /* Be sure expected_type is polymorphic. */
754 || TREE_CODE (comp_type
) != RECORD_TYPE
755 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))
756 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))))
759 ao_ref_init (&ao
, arg
);
762 ao
.size
= POINTER_SIZE
;
763 ao
.max_size
= ao
.size
;
766 tci
.object
= get_base_address (arg
);
767 tci
.type_maybe_changed
= false;
769 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
770 &tci
, NULL
, &entry_reached
);
771 if (!tci
.type_maybe_changed
)
774 ipa_set_jf_unknown (jfunc
);
778 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
779 If it is, return true and fill in the jump function JFUNC with relevant type
780 information or set it to unknown. ARG is the object itself (not a pointer
781 to it, unless dereferenced). BASE is the base of the memory access as
782 returned by get_ref_base_and_extent, as is the offset. */
785 detect_type_change (tree arg
, tree base
, tree comp_type
, gcall
*call
,
786 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
788 if (!flag_devirtualize
)
791 if (TREE_CODE (base
) == MEM_REF
792 && !param_type_may_change_p (current_function_decl
,
793 TREE_OPERAND (base
, 0),
796 return detect_type_change_from_memory_writes (arg
, base
, comp_type
,
797 call
, jfunc
, offset
);
800 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
801 SSA name (its dereference will become the base and the offset is assumed to
805 detect_type_change_ssa (tree arg
, tree comp_type
,
806 gcall
*call
, struct ipa_jump_func
*jfunc
)
808 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
809 if (!flag_devirtualize
810 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
813 if (!param_type_may_change_p (current_function_decl
, arg
, call
))
816 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
817 build_int_cst (ptr_type_node
, 0));
819 return detect_type_change_from_memory_writes (arg
, arg
, comp_type
,
823 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
824 boolean variable pointed to by DATA. */
827 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
830 bool *b
= (bool *) data
;
835 /* Return true if we have already walked so many statements in AA that we
836 should really just start giving up. */
839 aa_overwalked (struct func_body_info
*fbi
)
841 gcc_checking_assert (fbi
);
842 return fbi
->aa_walked
> (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS
);
845 /* Find the nearest valid aa status for parameter specified by INDEX that
848 static struct param_aa_status
*
849 find_dominating_aa_status (struct func_body_info
*fbi
, basic_block bb
,
854 bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
857 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
858 if (!bi
->param_aa_statuses
.is_empty ()
859 && bi
->param_aa_statuses
[index
].valid
)
860 return &bi
->param_aa_statuses
[index
];
864 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
865 structures and/or intialize the result with a dominating description as
868 static struct param_aa_status
*
869 parm_bb_aa_status_for_bb (struct func_body_info
*fbi
, basic_block bb
,
872 gcc_checking_assert (fbi
);
873 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
874 if (bi
->param_aa_statuses
.is_empty ())
875 bi
->param_aa_statuses
.safe_grow_cleared (fbi
->param_count
);
876 struct param_aa_status
*paa
= &bi
->param_aa_statuses
[index
];
879 gcc_checking_assert (!paa
->parm_modified
880 && !paa
->ref_modified
881 && !paa
->pt_modified
);
882 struct param_aa_status
*dom_paa
;
883 dom_paa
= find_dominating_aa_status (fbi
, bb
, index
);
893 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
894 a value known not to be modified in this function before reaching the
895 statement STMT. FBI holds information about the function we have so far
896 gathered but do not survive the summary building stage. */
899 parm_preserved_before_stmt_p (struct func_body_info
*fbi
, int index
,
900 gimple stmt
, tree parm_load
)
902 struct param_aa_status
*paa
;
903 bool modified
= false;
906 /* FIXME: FBI can be NULL if we are being called from outside
907 ipa_node_analysis or ipcp_transform_function, which currently happens
908 during inlining analysis. It would be great to extend fbi's lifetime and
909 always have it. Currently, we are just not afraid of too much walking in
913 if (aa_overwalked (fbi
))
915 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
916 if (paa
->parm_modified
)
922 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
923 ao_ref_init (&refd
, parm_load
);
924 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
927 fbi
->aa_walked
+= walked
;
929 paa
->parm_modified
= true;
933 /* If STMT is an assignment that loads a value from an parameter declaration,
934 return the index of the parameter in ipa_node_params which has not been
935 modified. Otherwise return -1. */
938 load_from_unmodified_param (struct func_body_info
*fbi
,
939 vec
<ipa_param_descriptor
> descriptors
,
945 if (!gimple_assign_single_p (stmt
))
948 op1
= gimple_assign_rhs1 (stmt
);
949 if (TREE_CODE (op1
) != PARM_DECL
)
952 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
954 || !parm_preserved_before_stmt_p (fbi
, index
, stmt
, op1
))
960 /* Return true if memory reference REF (which must be a load through parameter
961 with INDEX) loads data that are known to be unmodified in this function
962 before reaching statement STMT. */
965 parm_ref_data_preserved_p (struct func_body_info
*fbi
,
966 int index
, gimple stmt
, tree ref
)
968 struct param_aa_status
*paa
;
969 bool modified
= false;
972 /* FIXME: FBI can be NULL if we are being called from outside
973 ipa_node_analysis or ipcp_transform_function, which currently happens
974 during inlining analysis. It would be great to extend fbi's lifetime and
975 always have it. Currently, we are just not afraid of too much walking in
979 if (aa_overwalked (fbi
))
981 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
982 if (paa
->ref_modified
)
988 gcc_checking_assert (gimple_vuse (stmt
));
989 ao_ref_init (&refd
, ref
);
990 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
993 fbi
->aa_walked
+= walked
;
995 paa
->ref_modified
= true;
999 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1000 is known to be unmodified in this function before reaching call statement
1001 CALL into which it is passed. FBI describes the function body. */
1004 parm_ref_data_pass_through_p (struct func_body_info
*fbi
, int index
,
1005 gimple call
, tree parm
)
1007 bool modified
= false;
1010 /* It's unnecessary to calculate anything about memory contnets for a const
1011 function because it is not goin to use it. But do not cache the result
1012 either. Also, no such calculations for non-pointers. */
1013 if (!gimple_vuse (call
)
1014 || !POINTER_TYPE_P (TREE_TYPE (parm
))
1015 || aa_overwalked (fbi
))
1018 struct param_aa_status
*paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (call
),
1020 if (paa
->pt_modified
)
1023 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
1024 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
,
1026 fbi
->aa_walked
+= walked
;
1028 paa
->pt_modified
= true;
1032 /* Return true if we can prove that OP is a memory reference loading unmodified
1033 data from an aggregate passed as a parameter and if the aggregate is passed
1034 by reference, that the alias type of the load corresponds to the type of the
1035 formal parameter (so that we can rely on this type for TBAA in callers).
1036 INFO and PARMS_AINFO describe parameters of the current function (but the
1037 latter can be NULL), STMT is the load statement. If function returns true,
1038 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1039 within the aggregate and whether it is a load from a value passed by
1040 reference respectively. */
1043 ipa_load_from_parm_agg_1 (struct func_body_info
*fbi
,
1044 vec
<ipa_param_descriptor
> descriptors
,
1045 gimple stmt
, tree op
, int *index_p
,
1046 HOST_WIDE_INT
*offset_p
, HOST_WIDE_INT
*size_p
,
1050 HOST_WIDE_INT size
, max_size
;
1051 tree base
= get_ref_base_and_extent (op
, offset_p
, &size
, &max_size
);
1053 if (max_size
== -1 || max_size
!= size
|| *offset_p
< 0)
1058 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
1060 && parm_preserved_before_stmt_p (fbi
, index
, stmt
, op
))
1071 if (TREE_CODE (base
) != MEM_REF
1072 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
1073 || !integer_zerop (TREE_OPERAND (base
, 1)))
1076 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
1078 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
1079 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
1083 /* This branch catches situations where a pointer parameter is not a
1084 gimple register, for example:
1086 void hip7(S*) (struct S * p)
1088 void (*<T2e4>) (struct S *) D.1867;
1093 D.1867_2 = p.1_1->f;
1098 gimple def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
1099 index
= load_from_unmodified_param (fbi
, descriptors
, def
);
1103 && parm_ref_data_preserved_p (fbi
, index
, stmt
, op
))
1114 /* Just like the previous function, just without the param_analysis_info
1115 pointer, for users outside of this file. */
1118 ipa_load_from_parm_agg (struct ipa_node_params
*info
, gimple stmt
,
1119 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
1122 return ipa_load_from_parm_agg_1 (NULL
, info
->descriptors
, stmt
, op
, index_p
,
1123 offset_p
, NULL
, by_ref_p
);
1126 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1127 of an assignment statement STMT, try to determine whether we are actually
1128 handling any of the following cases and construct an appropriate jump
1129 function into JFUNC if so:
1131 1) The passed value is loaded from a formal parameter which is not a gimple
1132 register (most probably because it is addressable, the value has to be
1133 scalar) and we can guarantee the value has not changed. This case can
1134 therefore be described by a simple pass-through jump function. For example:
1143 2) The passed value can be described by a simple arithmetic pass-through
1150 D.2064_4 = a.1(D) + 4;
1153 This case can also occur in combination of the previous one, e.g.:
1161 D.2064_4 = a.0_3 + 4;
1164 3) The passed value is an address of an object within another one (which
1165 also passed by reference). Such situations are described by an ancestor
1166 jump function and describe situations such as:
1168 B::foo() (struct B * const this)
1172 D.1845_2 = &this_1(D)->D.1748;
1175 INFO is the structure describing individual parameters access different
1176 stages of IPA optimizations. PARMS_AINFO contains the information that is
1177 only needed for intraprocedural analysis. */
1180 compute_complex_assign_jump_func (struct func_body_info
*fbi
,
1181 struct ipa_node_params
*info
,
1182 struct ipa_jump_func
*jfunc
,
1183 gcall
*call
, gimple stmt
, tree name
,
1186 HOST_WIDE_INT offset
, size
, max_size
;
1187 tree op1
, tc_ssa
, base
, ssa
;
1190 op1
= gimple_assign_rhs1 (stmt
);
1192 if (TREE_CODE (op1
) == SSA_NAME
)
1194 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1195 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1197 index
= load_from_unmodified_param (fbi
, info
->descriptors
,
1198 SSA_NAME_DEF_STMT (op1
));
1203 index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1204 tc_ssa
= gimple_assign_lhs (stmt
);
1209 tree op2
= gimple_assign_rhs2 (stmt
);
1213 if (!is_gimple_ip_invariant (op2
)
1214 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
1215 && !useless_type_conversion_p (TREE_TYPE (name
),
1219 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1220 gimple_assign_rhs_code (stmt
));
1222 else if (gimple_assign_single_p (stmt
))
1224 bool agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
, tc_ssa
);
1225 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1230 if (TREE_CODE (op1
) != ADDR_EXPR
)
1232 op1
= TREE_OPERAND (op1
, 0);
1233 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1235 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
1236 if (TREE_CODE (base
) != MEM_REF
1237 /* If this is a varying address, punt. */
1239 || max_size
!= size
)
1241 offset
+= mem_ref_offset (base
).to_short_addr () * BITS_PER_UNIT
;
1242 ssa
= TREE_OPERAND (base
, 0);
1243 if (TREE_CODE (ssa
) != SSA_NAME
1244 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1248 /* Dynamic types are changed in constructors and destructors. */
1249 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1250 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1251 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1252 parm_ref_data_pass_through_p (fbi
, index
, call
, ssa
));
1255 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1258 iftmp.1_3 = &obj_2(D)->D.1762;
1260 The base of the MEM_REF must be a default definition SSA NAME of a
1261 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1262 whole MEM_REF expression is returned and the offset calculated from any
1263 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1264 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1267 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1269 HOST_WIDE_INT size
, max_size
;
1270 tree expr
, parm
, obj
;
1272 if (!gimple_assign_single_p (assign
))
1274 expr
= gimple_assign_rhs1 (assign
);
1276 if (TREE_CODE (expr
) != ADDR_EXPR
)
1278 expr
= TREE_OPERAND (expr
, 0);
1280 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
1282 if (TREE_CODE (expr
) != MEM_REF
1283 /* If this is a varying address, punt. */
1288 parm
= TREE_OPERAND (expr
, 0);
1289 if (TREE_CODE (parm
) != SSA_NAME
1290 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1291 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1294 *offset
+= mem_ref_offset (expr
).to_short_addr () * BITS_PER_UNIT
;
1300 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1301 statement PHI, try to find out whether NAME is in fact a
1302 multiple-inheritance typecast from a descendant into an ancestor of a formal
1303 parameter and thus can be described by an ancestor jump function and if so,
1304 write the appropriate function into JFUNC.
1306 Essentially we want to match the following pattern:
1314 iftmp.1_3 = &obj_2(D)->D.1762;
1317 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1318 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1322 compute_complex_ancestor_jump_func (struct func_body_info
*fbi
,
1323 struct ipa_node_params
*info
,
1324 struct ipa_jump_func
*jfunc
,
1325 gcall
*call
, gphi
*phi
)
1327 HOST_WIDE_INT offset
;
1328 gimple assign
, cond
;
1329 basic_block phi_bb
, assign_bb
, cond_bb
;
1330 tree tmp
, parm
, expr
, obj
;
1333 if (gimple_phi_num_args (phi
) != 2)
1336 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1337 tmp
= PHI_ARG_DEF (phi
, 0);
1338 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1339 tmp
= PHI_ARG_DEF (phi
, 1);
1342 if (TREE_CODE (tmp
) != SSA_NAME
1343 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1344 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1345 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1348 assign
= SSA_NAME_DEF_STMT (tmp
);
1349 assign_bb
= gimple_bb (assign
);
1350 if (!single_pred_p (assign_bb
))
1352 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1355 parm
= TREE_OPERAND (expr
, 0);
1356 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1360 cond_bb
= single_pred (assign_bb
);
1361 cond
= last_stmt (cond_bb
);
1363 || gimple_code (cond
) != GIMPLE_COND
1364 || gimple_cond_code (cond
) != NE_EXPR
1365 || gimple_cond_lhs (cond
) != parm
1366 || !integer_zerop (gimple_cond_rhs (cond
)))
1369 phi_bb
= gimple_bb (phi
);
1370 for (i
= 0; i
< 2; i
++)
1372 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1373 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1377 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1378 parm_ref_data_pass_through_p (fbi
, index
, call
, parm
));
1381 /* Inspect the given TYPE and return true iff it has the same structure (the
1382 same number of fields of the same types) as a C++ member pointer. If
1383 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1384 corresponding fields there. */
1387 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1391 if (TREE_CODE (type
) != RECORD_TYPE
)
1394 fld
= TYPE_FIELDS (type
);
1395 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1396 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1397 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1403 fld
= DECL_CHAIN (fld
);
1404 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1405 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1410 if (DECL_CHAIN (fld
))
1416 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1417 return the rhs of its defining statement. Otherwise return RHS as it
1421 get_ssa_def_if_simple_copy (tree rhs
)
1423 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1425 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1427 if (gimple_assign_single_p (def_stmt
))
1428 rhs
= gimple_assign_rhs1 (def_stmt
);
1435 /* Simple linked list, describing known contents of an aggregate beforere
1438 struct ipa_known_agg_contents_list
1440 /* Offset and size of the described part of the aggregate. */
1441 HOST_WIDE_INT offset
, size
;
1442 /* Known constant value or NULL if the contents is known to be unknown. */
1444 /* Pointer to the next structure in the list. */
1445 struct ipa_known_agg_contents_list
*next
;
1448 /* Find the proper place in linked list of ipa_known_agg_contents_list
1449 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1450 unless there is a partial overlap, in which case return NULL, or such
1451 element is already there, in which case set *ALREADY_THERE to true. */
1453 static struct ipa_known_agg_contents_list
**
1454 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list
**list
,
1455 HOST_WIDE_INT lhs_offset
,
1456 HOST_WIDE_INT lhs_size
,
1457 bool *already_there
)
1459 struct ipa_known_agg_contents_list
**p
= list
;
1460 while (*p
&& (*p
)->offset
< lhs_offset
)
1462 if ((*p
)->offset
+ (*p
)->size
> lhs_offset
)
1467 if (*p
&& (*p
)->offset
< lhs_offset
+ lhs_size
)
1469 if ((*p
)->offset
== lhs_offset
&& (*p
)->size
== lhs_size
)
1470 /* We already know this value is subsequently overwritten with
1472 *already_there
= true;
1474 /* Otherwise this is a partial overlap which we cannot
1481 /* Build aggregate jump function from LIST, assuming there are exactly
1482 CONST_COUNT constant entries there and that th offset of the passed argument
1483 is ARG_OFFSET and store it into JFUNC. */
1486 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list
*list
,
1487 int const_count
, HOST_WIDE_INT arg_offset
,
1488 struct ipa_jump_func
*jfunc
)
1490 vec_alloc (jfunc
->agg
.items
, const_count
);
1495 struct ipa_agg_jf_item item
;
1496 item
.offset
= list
->offset
- arg_offset
;
1497 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1498 item
.value
= unshare_expr_without_location (list
->constant
);
1499 jfunc
->agg
.items
->quick_push (item
);
1505 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1506 in ARG is filled in with constant values. ARG can either be an aggregate
1507 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1508 aggregate. JFUNC is the jump function into which the constants are
1509 subsequently stored. */
1512 determine_locally_known_aggregate_parts (gcall
*call
, tree arg
,
1514 struct ipa_jump_func
*jfunc
)
1516 struct ipa_known_agg_contents_list
*list
= NULL
;
1517 int item_count
= 0, const_count
= 0;
1518 HOST_WIDE_INT arg_offset
, arg_size
;
1519 gimple_stmt_iterator gsi
;
1521 bool check_ref
, by_ref
;
1524 /* The function operates in three stages. First, we prepare check_ref, r,
1525 arg_base and arg_offset based on what is actually passed as an actual
1528 if (POINTER_TYPE_P (arg_type
))
1531 if (TREE_CODE (arg
) == SSA_NAME
)
1534 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
))))
1539 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
1540 arg_size
= tree_to_uhwi (type_size
);
1541 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
1543 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1545 HOST_WIDE_INT arg_max_size
;
1547 arg
= TREE_OPERAND (arg
, 0);
1548 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1550 if (arg_max_size
== -1
1551 || arg_max_size
!= arg_size
1554 if (DECL_P (arg_base
))
1557 ao_ref_init (&r
, arg_base
);
1567 HOST_WIDE_INT arg_max_size
;
1569 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1573 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1575 if (arg_max_size
== -1
1576 || arg_max_size
!= arg_size
1580 ao_ref_init (&r
, arg
);
1583 /* Second stage walks back the BB, looks at individual statements and as long
1584 as it is confident of how the statements affect contents of the
1585 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1587 gsi
= gsi_for_stmt (call
);
1589 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
1591 struct ipa_known_agg_contents_list
*n
, **p
;
1592 gimple stmt
= gsi_stmt (gsi
);
1593 HOST_WIDE_INT lhs_offset
, lhs_size
, lhs_max_size
;
1594 tree lhs
, rhs
, lhs_base
;
1596 if (!stmt_may_clobber_ref_p_1 (stmt
, &r
))
1598 if (!gimple_assign_single_p (stmt
))
1601 lhs
= gimple_assign_lhs (stmt
);
1602 rhs
= gimple_assign_rhs1 (stmt
);
1603 if (!is_gimple_reg_type (TREE_TYPE (rhs
))
1604 || TREE_CODE (lhs
) == BIT_FIELD_REF
1605 || contains_bitfld_component_ref_p (lhs
))
1608 lhs_base
= get_ref_base_and_extent (lhs
, &lhs_offset
, &lhs_size
,
1610 if (lhs_max_size
== -1
1611 || lhs_max_size
!= lhs_size
)
1616 if (TREE_CODE (lhs_base
) != MEM_REF
1617 || TREE_OPERAND (lhs_base
, 0) != arg_base
1618 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1621 else if (lhs_base
!= arg_base
)
1623 if (DECL_P (lhs_base
))
1629 bool already_there
= false;
1630 p
= get_place_in_agg_contents_list (&list
, lhs_offset
, lhs_size
,
1637 rhs
= get_ssa_def_if_simple_copy (rhs
);
1638 n
= XALLOCA (struct ipa_known_agg_contents_list
);
1640 n
->offset
= lhs_offset
;
1641 if (is_gimple_ip_invariant (rhs
))
1647 n
->constant
= NULL_TREE
;
1652 if (const_count
== PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
)
1653 || item_count
== 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
))
1657 /* Third stage just goes over the list and creates an appropriate vector of
1658 ipa_agg_jf_item structures out of it, of sourse only if there are
1659 any known constants to begin with. */
1663 jfunc
->agg
.by_ref
= by_ref
;
1664 build_agg_jump_func_from_list (list
, const_count
, arg_offset
, jfunc
);
1669 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
1672 tree type
= (e
->callee
1673 ? TREE_TYPE (e
->callee
->decl
)
1674 : gimple_call_fntype (e
->call_stmt
));
1675 tree t
= TYPE_ARG_TYPES (type
);
1677 for (n
= 0; n
< i
; n
++)
1684 return TREE_VALUE (t
);
1687 t
= DECL_ARGUMENTS (e
->callee
->decl
);
1688 for (n
= 0; n
< i
; n
++)
1695 return TREE_TYPE (t
);
1699 /* Compute jump function for all arguments of callsite CS and insert the
1700 information in the jump_functions array in the ipa_edge_args corresponding
1701 to this callsite. */
1704 ipa_compute_jump_functions_for_edge (struct func_body_info
*fbi
,
1705 struct cgraph_edge
*cs
)
1707 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1708 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1709 gcall
*call
= cs
->call_stmt
;
1710 int n
, arg_num
= gimple_call_num_args (call
);
1711 bool useful_context
= false;
1713 if (arg_num
== 0 || args
->jump_functions
)
1715 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1716 if (flag_devirtualize
)
1717 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, arg_num
);
1719 if (gimple_call_internal_p (call
))
1721 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
1724 for (n
= 0; n
< arg_num
; n
++)
1726 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1727 tree arg
= gimple_call_arg (call
, n
);
1728 tree param_type
= ipa_get_callee_param_type (cs
, n
);
1729 if (flag_devirtualize
&& POINTER_TYPE_P (TREE_TYPE (arg
)))
1732 struct ipa_polymorphic_call_context
context (cs
->caller
->decl
,
1735 context
.get_dynamic_type (instance
, arg
, NULL
, cs
->call_stmt
);
1736 *ipa_get_ith_polymorhic_call_context (args
, n
) = context
;
1737 if (!context
.useless_p ())
1738 useful_context
= true;
1741 if (POINTER_TYPE_P (TREE_TYPE(arg
)))
1743 unsigned HOST_WIDE_INT hwi_bitpos
;
1746 if (get_pointer_alignment_1 (arg
, &align
, &hwi_bitpos
)
1747 && align
% BITS_PER_UNIT
== 0
1748 && hwi_bitpos
% BITS_PER_UNIT
== 0)
1750 jfunc
->alignment
.known
= true;
1751 jfunc
->alignment
.align
= align
/ BITS_PER_UNIT
;
1752 jfunc
->alignment
.misalign
= hwi_bitpos
/ BITS_PER_UNIT
;
1755 gcc_assert (!jfunc
->alignment
.known
);
1758 gcc_assert (!jfunc
->alignment
.known
);
1760 if (is_gimple_ip_invariant (arg
))
1761 ipa_set_jf_constant (jfunc
, arg
, cs
);
1762 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1763 && TREE_CODE (arg
) == PARM_DECL
)
1765 int index
= ipa_get_param_decl_index (info
, arg
);
1767 gcc_assert (index
>=0);
1768 /* Aggregate passed by value, check for pass-through, otherwise we
1769 will attempt to fill in aggregate contents later in this
1771 if (parm_preserved_before_stmt_p (fbi
, index
, call
, arg
))
1773 ipa_set_jf_simple_pass_through (jfunc
, index
, false);
1777 else if (TREE_CODE (arg
) == SSA_NAME
)
1779 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1781 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1785 agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
, arg
);
1786 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1791 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
1792 if (is_gimple_assign (stmt
))
1793 compute_complex_assign_jump_func (fbi
, info
, jfunc
,
1794 call
, stmt
, arg
, param_type
);
1795 else if (gimple_code (stmt
) == GIMPLE_PHI
)
1796 compute_complex_ancestor_jump_func (fbi
, info
, jfunc
,
1798 as_a
<gphi
*> (stmt
));
1802 /* If ARG is pointer, we can not use its type to determine the type of aggregate
1803 passed (because type conversions are ignored in gimple). Usually we can
1804 safely get type from function declaration, but in case of K&R prototypes or
1805 variadic functions we can try our luck with type of the pointer passed.
1806 TODO: Since we look for actual initialization of the memory object, we may better
1807 work out the type based on the memory stores we find. */
1809 param_type
= TREE_TYPE (arg
);
1811 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
1812 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
1813 && (jfunc
->type
!= IPA_JF_ANCESTOR
1814 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
1815 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
1816 || POINTER_TYPE_P (param_type
)))
1817 determine_locally_known_aggregate_parts (call
, arg
, param_type
, jfunc
);
1819 if (!useful_context
)
1820 vec_free (args
->polymorphic_call_contexts
);
1823 /* Compute jump functions for all edges - both direct and indirect - outgoing
1827 ipa_compute_jump_functions_for_bb (struct func_body_info
*fbi
, basic_block bb
)
1829 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
1831 struct cgraph_edge
*cs
;
1833 FOR_EACH_VEC_ELT_REVERSE (bi
->cg_edges
, i
, cs
)
1835 struct cgraph_node
*callee
= cs
->callee
;
1839 callee
->ultimate_alias_target ();
1840 /* We do not need to bother analyzing calls to unknown functions
1841 unless they may become known during lto/whopr. */
1842 if (!callee
->definition
&& !flag_lto
)
1845 ipa_compute_jump_functions_for_edge (fbi
, cs
);
1849 /* If STMT looks like a statement loading a value from a member pointer formal
1850 parameter, return that parameter and store the offset of the field to
1851 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1852 might be clobbered). If USE_DELTA, then we look for a use of the delta
1853 field rather than the pfn. */
1856 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
,
1857 HOST_WIDE_INT
*offset_p
)
1859 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
1861 if (!gimple_assign_single_p (stmt
))
1864 rhs
= gimple_assign_rhs1 (stmt
);
1865 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1867 ref_field
= TREE_OPERAND (rhs
, 1);
1868 rhs
= TREE_OPERAND (rhs
, 0);
1871 ref_field
= NULL_TREE
;
1872 if (TREE_CODE (rhs
) != MEM_REF
)
1874 rec
= TREE_OPERAND (rhs
, 0);
1875 if (TREE_CODE (rec
) != ADDR_EXPR
)
1877 rec
= TREE_OPERAND (rec
, 0);
1878 if (TREE_CODE (rec
) != PARM_DECL
1879 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1881 ref_offset
= TREE_OPERAND (rhs
, 1);
1888 *offset_p
= int_bit_position (fld
);
1892 if (integer_nonzerop (ref_offset
))
1894 return ref_field
== fld
? rec
: NULL_TREE
;
1897 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
1901 /* Returns true iff T is an SSA_NAME defined by a statement. */
1904 ipa_is_ssa_with_stmt_def (tree t
)
1906 if (TREE_CODE (t
) == SSA_NAME
1907 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1913 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1914 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1915 indirect call graph edge. */
1917 static struct cgraph_edge
*
1918 ipa_note_param_call (struct cgraph_node
*node
, int param_index
,
1921 struct cgraph_edge
*cs
;
1923 cs
= node
->get_edge (stmt
);
1924 cs
->indirect_info
->param_index
= param_index
;
1925 cs
->indirect_info
->agg_contents
= 0;
1926 cs
->indirect_info
->member_ptr
= 0;
1930 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1931 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1932 intermediate information about each formal parameter. Currently it checks
1933 whether the call calls a pointer that is a formal parameter and if so, the
1934 parameter is marked with the called flag and an indirect call graph edge
1935 describing the call is created. This is very simple for ordinary pointers
1936 represented in SSA but not-so-nice when it comes to member pointers. The
1937 ugly part of this function does nothing more than trying to match the
1938 pattern of such a call. An example of such a pattern is the gimple dump
1939 below, the call is on the last line:
1942 f$__delta_5 = f.__delta;
1943 f$__pfn_24 = f.__pfn;
1947 f$__delta_5 = MEM[(struct *)&f];
1948 f$__pfn_24 = MEM[(struct *)&f + 4B];
1950 and a few lines below:
1953 D.2496_3 = (int) f$__pfn_24;
1954 D.2497_4 = D.2496_3 & 1;
1961 D.2500_7 = (unsigned int) f$__delta_5;
1962 D.2501_8 = &S + D.2500_7;
1963 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1964 D.2503_10 = *D.2502_9;
1965 D.2504_12 = f$__pfn_24 + -1;
1966 D.2505_13 = (unsigned int) D.2504_12;
1967 D.2506_14 = D.2503_10 + D.2505_13;
1968 D.2507_15 = *D.2506_14;
1969 iftmp.11_16 = (String:: *) D.2507_15;
1972 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1973 D.2500_19 = (unsigned int) f$__delta_5;
1974 D.2508_20 = &S + D.2500_19;
1975 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1977 Such patterns are results of simple calls to a member pointer:
1979 int doprinting (int (MyString::* f)(int) const)
1981 MyString S ("somestring");
1986 Moreover, the function also looks for called pointers loaded from aggregates
1987 passed by value or reference. */
1990 ipa_analyze_indirect_call_uses (struct func_body_info
*fbi
, gcall
*call
,
1993 struct ipa_node_params
*info
= fbi
->info
;
1994 HOST_WIDE_INT offset
;
1997 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1999 tree var
= SSA_NAME_VAR (target
);
2000 int index
= ipa_get_param_decl_index (info
, var
);
2002 ipa_note_param_call (fbi
->node
, index
, call
);
2007 gimple def
= SSA_NAME_DEF_STMT (target
);
2008 if (gimple_assign_single_p (def
)
2009 && ipa_load_from_parm_agg_1 (fbi
, info
->descriptors
, def
,
2010 gimple_assign_rhs1 (def
), &index
, &offset
,
2013 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2014 cs
->indirect_info
->offset
= offset
;
2015 cs
->indirect_info
->agg_contents
= 1;
2016 cs
->indirect_info
->by_ref
= by_ref
;
2020 /* Now we need to try to match the complex pattern of calling a member
2022 if (gimple_code (def
) != GIMPLE_PHI
2023 || gimple_phi_num_args (def
) != 2
2024 || !POINTER_TYPE_P (TREE_TYPE (target
))
2025 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
2028 /* First, we need to check whether one of these is a load from a member
2029 pointer that is a parameter to this function. */
2030 tree n1
= PHI_ARG_DEF (def
, 0);
2031 tree n2
= PHI_ARG_DEF (def
, 1);
2032 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
2034 gimple d1
= SSA_NAME_DEF_STMT (n1
);
2035 gimple d2
= SSA_NAME_DEF_STMT (n2
);
2038 basic_block bb
, virt_bb
;
2039 basic_block join
= gimple_bb (def
);
2040 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
2042 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
2045 bb
= EDGE_PRED (join
, 0)->src
;
2046 virt_bb
= gimple_bb (d2
);
2048 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
2050 bb
= EDGE_PRED (join
, 1)->src
;
2051 virt_bb
= gimple_bb (d1
);
2056 /* Second, we need to check that the basic blocks are laid out in the way
2057 corresponding to the pattern. */
2059 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
2060 || single_pred (virt_bb
) != bb
2061 || single_succ (virt_bb
) != join
)
2064 /* Third, let's see that the branching is done depending on the least
2065 significant bit of the pfn. */
2067 gimple branch
= last_stmt (bb
);
2068 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
2071 if ((gimple_cond_code (branch
) != NE_EXPR
2072 && gimple_cond_code (branch
) != EQ_EXPR
)
2073 || !integer_zerop (gimple_cond_rhs (branch
)))
2076 tree cond
= gimple_cond_lhs (branch
);
2077 if (!ipa_is_ssa_with_stmt_def (cond
))
2080 def
= SSA_NAME_DEF_STMT (cond
);
2081 if (!is_gimple_assign (def
)
2082 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
2083 || !integer_onep (gimple_assign_rhs2 (def
)))
2086 cond
= gimple_assign_rhs1 (def
);
2087 if (!ipa_is_ssa_with_stmt_def (cond
))
2090 def
= SSA_NAME_DEF_STMT (cond
);
2092 if (is_gimple_assign (def
)
2093 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
2095 cond
= gimple_assign_rhs1 (def
);
2096 if (!ipa_is_ssa_with_stmt_def (cond
))
2098 def
= SSA_NAME_DEF_STMT (cond
);
2102 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
2103 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2104 == ptrmemfunc_vbit_in_delta
),
2109 index
= ipa_get_param_decl_index (info
, rec
);
2111 && parm_preserved_before_stmt_p (fbi
, index
, call
, rec
))
2113 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2114 cs
->indirect_info
->offset
= offset
;
2115 cs
->indirect_info
->agg_contents
= 1;
2116 cs
->indirect_info
->member_ptr
= 1;
2122 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2123 object referenced in the expression is a formal parameter of the caller
2124 FBI->node (described by FBI->info), create a call note for the
2128 ipa_analyze_virtual_call_uses (struct func_body_info
*fbi
,
2129 gcall
*call
, tree target
)
2131 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
2133 HOST_WIDE_INT anc_offset
;
2135 if (!flag_devirtualize
)
2138 if (TREE_CODE (obj
) != SSA_NAME
)
2141 struct ipa_node_params
*info
= fbi
->info
;
2142 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
2144 struct ipa_jump_func jfunc
;
2145 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
2149 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
2150 gcc_assert (index
>= 0);
2151 if (detect_type_change_ssa (obj
, obj_type_ref_class (target
),
2157 struct ipa_jump_func jfunc
;
2158 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
2161 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2164 index
= ipa_get_param_decl_index (info
,
2165 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2166 gcc_assert (index
>= 0);
2167 if (detect_type_change (obj
, expr
, obj_type_ref_class (target
),
2168 call
, &jfunc
, anc_offset
))
2172 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2173 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2174 ii
->offset
= anc_offset
;
2175 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2176 ii
->otr_type
= obj_type_ref_class (target
);
2177 ii
->polymorphic
= 1;
2180 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2181 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2182 containing intermediate information about each formal parameter. */
2185 ipa_analyze_call_uses (struct func_body_info
*fbi
, gcall
*call
)
2187 tree target
= gimple_call_fn (call
);
2190 || (TREE_CODE (target
) != SSA_NAME
2191 && !virtual_method_call_p (target
)))
2194 struct cgraph_edge
*cs
= fbi
->node
->get_edge (call
);
2195 /* If we previously turned the call into a direct call, there is
2196 no need to analyze. */
2197 if (cs
&& !cs
->indirect_unknown_callee
)
2200 if (cs
->indirect_info
->polymorphic
&& flag_devirtualize
)
2203 tree target
= gimple_call_fn (call
);
2204 ipa_polymorphic_call_context
context (current_function_decl
,
2205 target
, call
, &instance
);
2207 gcc_checking_assert (cs
->indirect_info
->otr_type
2208 == obj_type_ref_class (target
));
2209 gcc_checking_assert (cs
->indirect_info
->otr_token
2210 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target
)));
2212 cs
->indirect_info
->vptr_changed
2213 = !context
.get_dynamic_type (instance
,
2214 OBJ_TYPE_REF_OBJECT (target
),
2215 obj_type_ref_class (target
), call
);
2216 cs
->indirect_info
->context
= context
;
2219 if (TREE_CODE (target
) == SSA_NAME
)
2220 ipa_analyze_indirect_call_uses (fbi
, call
, target
);
2221 else if (virtual_method_call_p (target
))
2222 ipa_analyze_virtual_call_uses (fbi
, call
, target
);
2226 /* Analyze the call statement STMT with respect to formal parameters (described
2227 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2228 formal parameters are called. */
2231 ipa_analyze_stmt_uses (struct func_body_info
*fbi
, gimple stmt
)
2233 if (is_gimple_call (stmt
))
2234 ipa_analyze_call_uses (fbi
, as_a
<gcall
*> (stmt
));
2237 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2238 If OP is a parameter declaration, mark it as used in the info structure
2242 visit_ref_for_mod_analysis (gimple
, tree op
, tree
, void *data
)
2244 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
2246 op
= get_base_address (op
);
2248 && TREE_CODE (op
) == PARM_DECL
)
2250 int index
= ipa_get_param_decl_index (info
, op
);
2251 gcc_assert (index
>= 0);
2252 ipa_set_param_used (info
, index
, true);
2258 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2259 the findings in various structures of the associated ipa_node_params
2260 structure, such as parameter flags, notes etc. FBI holds various data about
2261 the function being analyzed. */
2264 ipa_analyze_params_uses_in_bb (struct func_body_info
*fbi
, basic_block bb
)
2266 gimple_stmt_iterator gsi
;
2267 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2269 gimple stmt
= gsi_stmt (gsi
);
2271 if (is_gimple_debug (stmt
))
2274 ipa_analyze_stmt_uses (fbi
, stmt
);
2275 walk_stmt_load_store_addr_ops (stmt
, fbi
->info
,
2276 visit_ref_for_mod_analysis
,
2277 visit_ref_for_mod_analysis
,
2278 visit_ref_for_mod_analysis
);
2280 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2281 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), fbi
->info
,
2282 visit_ref_for_mod_analysis
,
2283 visit_ref_for_mod_analysis
,
2284 visit_ref_for_mod_analysis
);
2287 /* Calculate controlled uses of parameters of NODE. */
2290 ipa_analyze_controlled_uses (struct cgraph_node
*node
)
2292 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
2294 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2296 tree parm
= ipa_get_param (info
, i
);
2297 int controlled_uses
= 0;
2299 /* For SSA regs see if parameter is used. For non-SSA we compute
2300 the flag during modification analysis. */
2301 if (is_gimple_reg (parm
))
2303 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2305 if (ddef
&& !has_zero_uses (ddef
))
2307 imm_use_iterator imm_iter
;
2308 use_operand_p use_p
;
2310 ipa_set_param_used (info
, i
, true);
2311 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2312 if (!is_gimple_call (USE_STMT (use_p
)))
2314 if (!is_gimple_debug (USE_STMT (use_p
)))
2316 controlled_uses
= IPA_UNDESCRIBED_USE
;
2324 controlled_uses
= 0;
2327 controlled_uses
= IPA_UNDESCRIBED_USE
;
2328 ipa_set_controlled_uses (info
, i
, controlled_uses
);
2332 /* Free stuff in BI. */
2335 free_ipa_bb_info (struct ipa_bb_info
*bi
)
2337 bi
->cg_edges
.release ();
2338 bi
->param_aa_statuses
.release ();
2341 /* Dominator walker driving the analysis. */
2343 class analysis_dom_walker
: public dom_walker
2346 analysis_dom_walker (struct func_body_info
*fbi
)
2347 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
) {}
2349 virtual void before_dom_children (basic_block
);
2352 struct func_body_info
*m_fbi
;
2356 analysis_dom_walker::before_dom_children (basic_block bb
)
2358 ipa_analyze_params_uses_in_bb (m_fbi
, bb
);
2359 ipa_compute_jump_functions_for_bb (m_fbi
, bb
);
2362 /* Initialize the array describing properties of of formal parameters
2363 of NODE, analyze their uses and compute jump functions associated
2364 with actual arguments of calls from within NODE. */
2367 ipa_analyze_node (struct cgraph_node
*node
)
2369 struct func_body_info fbi
;
2370 struct ipa_node_params
*info
;
2372 ipa_check_create_node_params ();
2373 ipa_check_create_edge_args ();
2374 info
= IPA_NODE_REF (node
);
2376 if (info
->analysis_done
)
2378 info
->analysis_done
= 1;
2380 if (ipa_func_spec_opts_forbid_analysis_p (node
))
2382 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2384 ipa_set_param_used (info
, i
, true);
2385 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
);
2390 struct function
*func
= DECL_STRUCT_FUNCTION (node
->decl
);
2392 calculate_dominance_info (CDI_DOMINATORS
);
2393 ipa_initialize_node_params (node
);
2394 ipa_analyze_controlled_uses (node
);
2397 fbi
.info
= IPA_NODE_REF (node
);
2398 fbi
.bb_infos
= vNULL
;
2399 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
));
2400 fbi
.param_count
= ipa_get_param_count (info
);
2403 for (struct cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
2405 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
2406 bi
->cg_edges
.safe_push (cs
);
2409 for (struct cgraph_edge
*cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
2411 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
2412 bi
->cg_edges
.safe_push (cs
);
2415 analysis_dom_walker (&fbi
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
2418 struct ipa_bb_info
*bi
;
2419 FOR_EACH_VEC_ELT (fbi
.bb_infos
, i
, bi
)
2420 free_ipa_bb_info (bi
);
2421 fbi
.bb_infos
.release ();
2422 free_dominance_info (CDI_DOMINATORS
);
2426 /* Update the jump functions associated with call graph edge E when the call
2427 graph edge CS is being inlined, assuming that E->caller is already (possibly
2428 indirectly) inlined into CS->callee and that E has not been inlined. */
2431 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2432 struct cgraph_edge
*e
)
2434 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2435 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2436 int count
= ipa_get_cs_argument_count (args
);
2439 for (i
= 0; i
< count
; i
++)
2441 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2442 struct ipa_polymorphic_call_context
*dst_ctx
2443 = ipa_get_ith_polymorhic_call_context (args
, i
);
2445 if (dst
->type
== IPA_JF_ANCESTOR
)
2447 struct ipa_jump_func
*src
;
2448 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2449 struct ipa_polymorphic_call_context
*src_ctx
2450 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2452 /* Variable number of arguments can cause havoc if we try to access
2453 one that does not exist in the inlined edge. So make sure we
2455 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2457 ipa_set_jf_unknown (dst
);
2461 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2463 if (src_ctx
&& !src_ctx
->useless_p ())
2465 struct ipa_polymorphic_call_context ctx
= *src_ctx
;
2467 /* TODO: Make type preserved safe WRT contexts. */
2468 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2469 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2470 ctx
.offset_by (dst
->value
.ancestor
.offset
);
2471 if (!ctx
.useless_p ())
2473 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2475 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2477 dst_ctx
->combine_with (ctx
);
2481 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2483 struct ipa_agg_jf_item
*item
;
2486 /* Currently we do not produce clobber aggregate jump functions,
2487 replace with merging when we do. */
2488 gcc_assert (!dst
->agg
.items
);
2490 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2491 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2492 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2493 item
->offset
-= dst
->value
.ancestor
.offset
;
2496 if (src
->type
== IPA_JF_PASS_THROUGH
2497 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2499 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2500 dst
->value
.ancestor
.agg_preserved
&=
2501 src
->value
.pass_through
.agg_preserved
;
2503 else if (src
->type
== IPA_JF_ANCESTOR
)
2505 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2506 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2507 dst
->value
.ancestor
.agg_preserved
&=
2508 src
->value
.ancestor
.agg_preserved
;
2511 ipa_set_jf_unknown (dst
);
2513 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2515 struct ipa_jump_func
*src
;
2516 /* We must check range due to calls with variable number of arguments
2517 and we cannot combine jump functions with operations. */
2518 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2519 && (dst
->value
.pass_through
.formal_id
2520 < ipa_get_cs_argument_count (top
)))
2522 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2523 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2524 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2525 struct ipa_polymorphic_call_context
*src_ctx
2526 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2528 if (src_ctx
&& !src_ctx
->useless_p ())
2530 struct ipa_polymorphic_call_context ctx
= *src_ctx
;
2532 /* TODO: Make type preserved safe WRT contexts. */
2533 if (!ipa_get_jf_pass_through_type_preserved (dst
))
2534 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2535 if (!ctx
.useless_p ())
2539 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2541 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2543 dst_ctx
->combine_with (ctx
);
2548 case IPA_JF_UNKNOWN
:
2549 ipa_set_jf_unknown (dst
);
2552 ipa_set_jf_cst_copy (dst
, src
);
2555 case IPA_JF_PASS_THROUGH
:
2557 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2558 enum tree_code operation
;
2559 operation
= ipa_get_jf_pass_through_operation (src
);
2561 if (operation
== NOP_EXPR
)
2565 && ipa_get_jf_pass_through_agg_preserved (src
);
2566 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
2570 tree operand
= ipa_get_jf_pass_through_operand (src
);
2571 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2576 case IPA_JF_ANCESTOR
:
2580 && ipa_get_jf_ancestor_agg_preserved (src
);
2581 ipa_set_ancestor_jf (dst
,
2582 ipa_get_jf_ancestor_offset (src
),
2583 ipa_get_jf_ancestor_formal_id (src
),
2592 && (dst_agg_p
|| !src
->agg
.by_ref
))
2594 /* Currently we do not produce clobber aggregate jump
2595 functions, replace with merging when we do. */
2596 gcc_assert (!dst
->agg
.items
);
2598 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2599 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2603 ipa_set_jf_unknown (dst
);
2608 /* If TARGET is an addr_expr of a function declaration, make it the
2609 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2610 Otherwise, return NULL. */
2612 struct cgraph_edge
*
2613 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
2616 struct cgraph_node
*callee
;
2617 struct inline_edge_summary
*es
= inline_edge_summary (ie
);
2618 bool unreachable
= false;
2620 if (TREE_CODE (target
) == ADDR_EXPR
)
2621 target
= TREE_OPERAND (target
, 0);
2622 if (TREE_CODE (target
) != FUNCTION_DECL
)
2624 target
= canonicalize_constructor_val (target
, NULL
);
2625 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2627 /* Member pointer call that goes through a VMT lookup. */
2628 if (ie
->indirect_info
->member_ptr
2629 /* Or if target is not an invariant expression and we do not
2630 know if it will evaulate to function at runtime.
2631 This can happen when folding through &VAR, where &VAR
2632 is IP invariant, but VAR itself is not.
2634 TODO: Revisit this when GCC 5 is branched. It seems that
2635 member_ptr check is not needed and that we may try to fold
2636 the expression and see if VAR is readonly. */
2637 || !is_gimple_ip_invariant (target
))
2639 if (dump_enabled_p ())
2641 location_t loc
= gimple_location_safe (ie
->call_stmt
);
2642 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2643 "discovered direct call non-invariant "
2645 ie
->caller
->name (), ie
->caller
->order
);
2651 if (dump_enabled_p ())
2653 location_t loc
= gimple_location_safe (ie
->call_stmt
);
2654 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2655 "discovered direct call to non-function in %s/%i, "
2656 "making it __builtin_unreachable\n",
2657 ie
->caller
->name (), ie
->caller
->order
);
2660 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2661 callee
= cgraph_node::get_create (target
);
2665 callee
= cgraph_node::get (target
);
2668 callee
= cgraph_node::get (target
);
2670 /* Because may-edges are not explicitely represented and vtable may be external,
2671 we may create the first reference to the object in the unit. */
2672 if (!callee
|| callee
->global
.inlined_to
)
2675 /* We are better to ensure we can refer to it.
2676 In the case of static functions we are out of luck, since we already
2677 removed its body. In the case of public functions we may or may
2678 not introduce the reference. */
2679 if (!canonicalize_constructor_val (target
, NULL
)
2680 || !TREE_PUBLIC (target
))
2683 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2684 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2685 xstrdup_for_dump (ie
->caller
->name ()),
2687 xstrdup_for_dump (ie
->callee
->name ()),
2691 callee
= cgraph_node::get_create (target
);
2694 /* If the edge is already speculated. */
2695 if (speculative
&& ie
->speculative
)
2697 struct cgraph_edge
*e2
;
2698 struct ipa_ref
*ref
;
2699 ie
->speculative_call_info (e2
, ie
, ref
);
2700 if (e2
->callee
->ultimate_alias_target ()
2701 != callee
->ultimate_alias_target ())
2704 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative target "
2705 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2706 xstrdup_for_dump (ie
->caller
->name ()),
2708 xstrdup_for_dump (callee
->name ()),
2710 xstrdup_for_dump (e2
->callee
->name ()),
2716 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative target "
2717 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2718 xstrdup_for_dump (ie
->caller
->name ()),
2720 xstrdup_for_dump (callee
->name ()),
2726 if (!dbg_cnt (devirt
))
2729 ipa_check_create_node_params ();
2731 /* We can not make edges to inline clones. It is bug that someone removed
2732 the cgraph node too early. */
2733 gcc_assert (!callee
->global
.inlined_to
);
2735 if (dump_file
&& !unreachable
)
2737 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
2738 "(%s/%i -> %s/%i), for stmt ",
2739 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2740 speculative
? "speculative" : "known",
2741 xstrdup_for_dump (ie
->caller
->name ()),
2743 xstrdup_for_dump (callee
->name ()),
2746 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2748 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2750 if (dump_enabled_p ())
2752 location_t loc
= gimple_location_safe (ie
->call_stmt
);
2754 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2755 "converting indirect call in %s to direct call to %s\n",
2756 ie
->caller
->name (), callee
->name ());
2760 struct cgraph_edge
*orig
= ie
;
2761 ie
= ie
->make_direct (callee
);
2762 /* If we resolved speculative edge the cost is already up to date
2763 for direct call (adjusted by inline_edge_duplication_hook). */
2766 es
= inline_edge_summary (ie
);
2767 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2768 - eni_size_weights
.call_cost
);
2769 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2770 - eni_time_weights
.call_cost
);
2775 if (!callee
->can_be_discarded_p ())
2778 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
2782 /* make_speculative will update ie's cost to direct call cost. */
2783 ie
= ie
->make_speculative
2784 (callee
, ie
->count
* 8 / 10, ie
->frequency
* 8 / 10);
2790 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2791 return NULL if there is not any. BY_REF specifies whether the value has to
2792 be passed by reference or by value. */
2795 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
,
2796 HOST_WIDE_INT offset
, bool by_ref
)
2798 struct ipa_agg_jf_item
*item
;
2801 if (by_ref
!= agg
->by_ref
)
2804 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
2805 if (item
->offset
== offset
)
2807 /* Currently we do not have clobber values, return NULL for them once
2809 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
2815 /* Remove a reference to SYMBOL from the list of references of a node given by
2816 reference description RDESC. Return true if the reference has been
2817 successfully found and removed. */
2820 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
2822 struct ipa_ref
*to_del
;
2823 struct cgraph_edge
*origin
;
2828 to_del
= origin
->caller
->find_reference (symbol
, origin
->call_stmt
,
2829 origin
->lto_stmt_uid
);
2833 to_del
->remove_reference ();
2835 fprintf (dump_file
, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2836 xstrdup_for_dump (origin
->caller
->name ()),
2837 origin
->caller
->order
, xstrdup_for_dump (symbol
->name ()));
2841 /* If JFUNC has a reference description with refcount different from
2842 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2843 NULL. JFUNC must be a constant jump function. */
2845 static struct ipa_cst_ref_desc
*
2846 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
2848 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
2849 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
2855 /* If the value of constant jump function JFUNC is an address of a function
2856 declaration, return the associated call graph node. Otherwise return
2859 static cgraph_node
*
2860 cgraph_node_for_jfunc (struct ipa_jump_func
*jfunc
)
2862 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
2863 tree cst
= ipa_get_jf_constant (jfunc
);
2864 if (TREE_CODE (cst
) != ADDR_EXPR
2865 || TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
)
2868 return cgraph_node::get (TREE_OPERAND (cst
, 0));
2872 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2873 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2874 the edge specified in the rdesc. Return false if either the symbol or the
2875 reference could not be found, otherwise return true. */
2878 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
2880 struct ipa_cst_ref_desc
*rdesc
;
2881 if (jfunc
->type
== IPA_JF_CONST
2882 && (rdesc
= jfunc_rdesc_usable (jfunc
))
2883 && --rdesc
->refcount
== 0)
2885 symtab_node
*symbol
= cgraph_node_for_jfunc (jfunc
);
2889 return remove_described_reference (symbol
, rdesc
);
2894 /* Try to find a destination for indirect edge IE that corresponds to a simple
2895 call or a call of a member function pointer and where the destination is a
2896 pointer formal parameter described by jump function JFUNC. If it can be
2897 determined, return the newly direct edge, otherwise return NULL.
2898 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2900 static struct cgraph_edge
*
2901 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
2902 struct ipa_jump_func
*jfunc
,
2903 struct ipa_node_params
*new_root_info
)
2905 struct cgraph_edge
*cs
;
2907 bool agg_contents
= ie
->indirect_info
->agg_contents
;
2909 if (ie
->indirect_info
->agg_contents
)
2910 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2911 ie
->indirect_info
->offset
,
2912 ie
->indirect_info
->by_ref
);
2914 target
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2917 cs
= ipa_make_edge_direct_to_target (ie
, target
);
2919 if (cs
&& !agg_contents
)
2922 gcc_checking_assert (cs
->callee
2924 || jfunc
->type
!= IPA_JF_CONST
2925 || !cgraph_node_for_jfunc (jfunc
)
2926 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
2927 ok
= try_decrement_rdesc_refcount (jfunc
);
2928 gcc_checking_assert (ok
);
2934 /* Return the target to be used in cases of impossible devirtualization. IE
2935 and target (the latter can be NULL) are dumped when dumping is enabled. */
2938 ipa_impossible_devirt_target (struct cgraph_edge
*ie
, tree target
)
2944 "Type inconsistent devirtualization: %s/%i->%s\n",
2945 ie
->caller
->name (), ie
->caller
->order
,
2946 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target
)));
2949 "No devirtualization target in %s/%i\n",
2950 ie
->caller
->name (), ie
->caller
->order
);
2952 tree new_target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2953 cgraph_node::get_create (new_target
);
2957 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2958 call based on a formal parameter which is described by jump function JFUNC
2959 and if it can be determined, make it direct and return the direct edge.
2960 Otherwise, return NULL. CTX describes the polymorphic context that the
2961 parameter the call is based on brings along with it. */
2963 static struct cgraph_edge
*
2964 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
2965 struct ipa_jump_func
*jfunc
,
2966 struct ipa_polymorphic_call_context ctx
)
2969 bool speculative
= false;
2971 if (!opt_for_fn (ie
->caller
->decl
, flag_devirtualize
))
2974 gcc_assert (!ie
->indirect_info
->by_ref
);
2976 /* Try to do lookup via known virtual table pointer value. */
2977 if (!ie
->indirect_info
->vptr_changed
2978 || opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
))
2981 unsigned HOST_WIDE_INT offset
;
2982 tree t
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2983 ie
->indirect_info
->offset
,
2985 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
2987 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
2991 if ((TREE_CODE (TREE_TYPE (t
)) == FUNCTION_TYPE
2992 && DECL_FUNCTION_CODE (t
) == BUILT_IN_UNREACHABLE
)
2993 || !possible_polymorphic_call_target_p
2994 (ie
, cgraph_node::get (t
)))
2996 /* Do not speculate builtin_unreachable, it is stupid! */
2997 if (!ie
->indirect_info
->vptr_changed
)
2998 target
= ipa_impossible_devirt_target (ie
, target
);
3003 speculative
= ie
->indirect_info
->vptr_changed
;
3009 ipa_polymorphic_call_context
ie_context (ie
);
3010 vec
<cgraph_node
*>targets
;
3013 ctx
.offset_by (ie
->indirect_info
->offset
);
3014 if (ie
->indirect_info
->vptr_changed
)
3015 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
3016 ie
->indirect_info
->otr_type
);
3017 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
3018 targets
= possible_polymorphic_call_targets
3019 (ie
->indirect_info
->otr_type
,
3020 ie
->indirect_info
->otr_token
,
3022 if (final
&& targets
.length () <= 1)
3024 speculative
= false;
3025 if (targets
.length () == 1)
3026 target
= targets
[0]->decl
;
3028 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
3030 else if (!target
&& opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
)
3031 && !ie
->speculative
&& ie
->maybe_hot_p ())
3034 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
3035 ie
->indirect_info
->otr_token
,
3036 ie
->indirect_info
->context
);
3046 if (!possible_polymorphic_call_target_p
3047 (ie
, cgraph_node::get_create (target
)))
3051 target
= ipa_impossible_devirt_target (ie
, target
);
3053 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
3059 /* Update the param called notes associated with NODE when CS is being inlined,
3060 assuming NODE is (potentially indirectly) inlined into CS->callee.
3061 Moreover, if the callee is discovered to be constant, create a new cgraph
3062 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3063 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3066 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
3067 struct cgraph_node
*node
,
3068 vec
<cgraph_edge
*> *new_edges
)
3070 struct ipa_edge_args
*top
;
3071 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
3072 struct ipa_node_params
*new_root_info
;
3075 ipa_check_create_edge_args ();
3076 top
= IPA_EDGE_REF (cs
);
3077 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
3078 ? cs
->caller
->global
.inlined_to
3081 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
3083 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
3084 struct ipa_jump_func
*jfunc
;
3086 cgraph_node
*spec_target
= NULL
;
3088 next_ie
= ie
->next_callee
;
3090 if (ici
->param_index
== -1)
3093 /* We must check range due to calls with variable number of arguments: */
3094 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
3096 ici
->param_index
= -1;
3100 param_index
= ici
->param_index
;
3101 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
3103 if (ie
->speculative
)
3105 struct cgraph_edge
*de
;
3106 struct ipa_ref
*ref
;
3107 ie
->speculative_call_info (de
, ie
, ref
);
3108 spec_target
= de
->callee
;
3111 if (!opt_for_fn (node
->decl
, flag_indirect_inlining
))
3112 new_direct_edge
= NULL
;
3113 else if (ici
->polymorphic
)
3115 ipa_polymorphic_call_context ctx
;
3116 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
3117 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
);
3120 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
3122 /* If speculation was removed, then we need to do nothing. */
3123 if (new_direct_edge
&& new_direct_edge
!= ie
3124 && new_direct_edge
->callee
== spec_target
)
3126 new_direct_edge
->indirect_inlining_edge
= 1;
3127 top
= IPA_EDGE_REF (cs
);
3129 if (!new_direct_edge
->speculative
)
3132 else if (new_direct_edge
)
3134 new_direct_edge
->indirect_inlining_edge
= 1;
3135 if (new_direct_edge
->call_stmt
)
3136 new_direct_edge
->call_stmt_cannot_inline_p
3137 = !gimple_check_call_matching_types (
3138 new_direct_edge
->call_stmt
,
3139 new_direct_edge
->callee
->decl
, false);
3142 new_edges
->safe_push (new_direct_edge
);
3145 top
= IPA_EDGE_REF (cs
);
3146 /* If speculative edge was introduced we still need to update
3147 call info of the indirect edge. */
3148 if (!new_direct_edge
->speculative
)
3151 if (jfunc
->type
== IPA_JF_PASS_THROUGH
3152 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
3154 if (ici
->agg_contents
3155 && !ipa_get_jf_pass_through_agg_preserved (jfunc
)
3156 && !ici
->polymorphic
)
3157 ici
->param_index
= -1;
3160 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
3161 if (ici
->polymorphic
3162 && !ipa_get_jf_pass_through_type_preserved (jfunc
))
3163 ici
->vptr_changed
= true;
3166 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
3168 if (ici
->agg_contents
3169 && !ipa_get_jf_ancestor_agg_preserved (jfunc
)
3170 && !ici
->polymorphic
)
3171 ici
->param_index
= -1;
3174 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
3175 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
3176 if (ici
->polymorphic
3177 && !ipa_get_jf_ancestor_type_preserved (jfunc
))
3178 ici
->vptr_changed
= true;
3182 /* Either we can find a destination for this edge now or never. */
3183 ici
->param_index
= -1;
3189 /* Recursively traverse subtree of NODE (including node) made of inlined
3190 cgraph_edges when CS has been inlined and invoke
3191 update_indirect_edges_after_inlining on all nodes and
3192 update_jump_functions_after_inlining on all non-inlined edges that lead out
3193 of this subtree. Newly discovered indirect edges will be added to
3194 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3198 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
3199 struct cgraph_node
*node
,
3200 vec
<cgraph_edge
*> *new_edges
)
3202 struct cgraph_edge
*e
;
3205 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
3207 for (e
= node
->callees
; e
; e
= e
->next_callee
)
3208 if (!e
->inline_failed
)
3209 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
3211 update_jump_functions_after_inlining (cs
, e
);
3212 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3213 update_jump_functions_after_inlining (cs
, e
);
3218 /* Combine two controlled uses counts as done during inlining. */
3221 combine_controlled_uses_counters (int c
, int d
)
3223 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
3224 return IPA_UNDESCRIBED_USE
;
3229 /* Propagate number of controlled users from CS->caleee to the new root of the
3230 tree of inlined nodes. */
3233 propagate_controlled_uses (struct cgraph_edge
*cs
)
3235 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
3236 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
3237 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
3238 struct ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
3239 struct ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
3242 count
= MIN (ipa_get_cs_argument_count (args
),
3243 ipa_get_param_count (old_root_info
));
3244 for (i
= 0; i
< count
; i
++)
3246 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3247 struct ipa_cst_ref_desc
*rdesc
;
3249 if (jf
->type
== IPA_JF_PASS_THROUGH
)
3252 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
3253 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
3254 d
= ipa_get_controlled_uses (old_root_info
, i
);
3256 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
3257 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
3258 c
= combine_controlled_uses_counters (c
, d
);
3259 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
3260 if (c
== 0 && new_root_info
->ipcp_orig_node
)
3262 struct cgraph_node
*n
;
3263 struct ipa_ref
*ref
;
3264 tree t
= new_root_info
->known_csts
[src_idx
];
3266 if (t
&& TREE_CODE (t
) == ADDR_EXPR
3267 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
3268 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
3269 && (ref
= new_root
->find_reference (n
, NULL
, 0)))
3272 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
3273 "reference from %s/%i to %s/%i.\n",
3274 xstrdup_for_dump (new_root
->name ()),
3276 xstrdup_for_dump (n
->name ()), n
->order
);
3277 ref
->remove_reference ();
3281 else if (jf
->type
== IPA_JF_CONST
3282 && (rdesc
= jfunc_rdesc_usable (jf
)))
3284 int d
= ipa_get_controlled_uses (old_root_info
, i
);
3285 int c
= rdesc
->refcount
;
3286 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
3287 if (rdesc
->refcount
== 0)
3289 tree cst
= ipa_get_jf_constant (jf
);
3290 struct cgraph_node
*n
;
3291 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
3292 && TREE_CODE (TREE_OPERAND (cst
, 0))
3294 n
= cgraph_node::get (TREE_OPERAND (cst
, 0));
3297 struct cgraph_node
*clone
;
3299 ok
= remove_described_reference (n
, rdesc
);
3300 gcc_checking_assert (ok
);
3303 while (clone
->global
.inlined_to
3304 && clone
!= rdesc
->cs
->caller
3305 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
3307 struct ipa_ref
*ref
;
3308 ref
= clone
->find_reference (n
, NULL
, 0);
3312 fprintf (dump_file
, "ipa-prop: Removing "
3313 "cloning-created reference "
3314 "from %s/%i to %s/%i.\n",
3315 xstrdup_for_dump (clone
->name ()),
3317 xstrdup_for_dump (n
->name ()),
3319 ref
->remove_reference ();
3321 clone
= clone
->callers
->caller
;
3328 for (i
= ipa_get_param_count (old_root_info
);
3329 i
< ipa_get_cs_argument_count (args
);
3332 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3334 if (jf
->type
== IPA_JF_CONST
)
3336 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
3338 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
3340 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
3341 ipa_set_controlled_uses (new_root_info
,
3342 jf
->value
.pass_through
.formal_id
,
3343 IPA_UNDESCRIBED_USE
);
3347 /* Update jump functions and call note functions on inlining the call site CS.
3348 CS is expected to lead to a node already cloned by
3349 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3350 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3354 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
3355 vec
<cgraph_edge
*> *new_edges
)
3358 /* Do nothing if the preparation phase has not been carried out yet
3359 (i.e. during early inlining). */
3360 if (!ipa_node_params_sum
)
3362 gcc_assert (ipa_edge_args_vector
);
3364 propagate_controlled_uses (cs
);
3365 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
3370 /* Frees all dynamically allocated structures that the argument info points
3374 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
3376 vec_free (args
->jump_functions
);
3377 memset (args
, 0, sizeof (*args
));
3380 /* Free all ipa_edge structures. */
3383 ipa_free_all_edge_args (void)
3386 struct ipa_edge_args
*args
;
3388 if (!ipa_edge_args_vector
)
3391 FOR_EACH_VEC_ELT (*ipa_edge_args_vector
, i
, args
)
3392 ipa_free_edge_args_substructures (args
);
3394 vec_free (ipa_edge_args_vector
);
3397 /* Frees all dynamically allocated structures that the param info points
3400 ipa_node_params::~ipa_node_params ()
3402 descriptors
.release ();
3404 /* Lattice values and their sources are deallocated with their alocation
3406 known_contexts
.release ();
3409 ipcp_orig_node
= NULL
;
3412 do_clone_for_all_contexts
= 0;
3413 is_all_contexts_clone
= 0;
3417 /* Free all ipa_node_params structures. */
3420 ipa_free_all_node_params (void)
3422 delete ipa_node_params_sum
;
3423 ipa_node_params_sum
= NULL
;
3426 /* Grow ipcp_transformations if necessary. */
3429 ipcp_grow_transformations_if_necessary (void)
3431 if (vec_safe_length (ipcp_transformations
)
3432 <= (unsigned) symtab
->cgraph_max_uid
)
3433 vec_safe_grow_cleared (ipcp_transformations
, symtab
->cgraph_max_uid
+ 1);
3436 /* Set the aggregate replacements of NODE to be AGGVALS. */
3439 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
3440 struct ipa_agg_replacement_value
*aggvals
)
3442 ipcp_grow_transformations_if_necessary ();
3443 (*ipcp_transformations
)[node
->uid
].agg_values
= aggvals
;
3446 /* Hook that is called by cgraph.c when an edge is removed. */
3449 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
3451 struct ipa_edge_args
*args
;
3453 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3454 if (vec_safe_length (ipa_edge_args_vector
) <= (unsigned)cs
->uid
)
3457 args
= IPA_EDGE_REF (cs
);
3458 if (args
->jump_functions
)
3460 struct ipa_jump_func
*jf
;
3462 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3464 struct ipa_cst_ref_desc
*rdesc
;
3465 try_decrement_rdesc_refcount (jf
);
3466 if (jf
->type
== IPA_JF_CONST
3467 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
3473 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
3476 /* Hook that is called by cgraph.c when an edge is duplicated. */
3479 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
3482 struct ipa_edge_args
*old_args
, *new_args
;
3485 ipa_check_create_edge_args ();
3487 old_args
= IPA_EDGE_REF (src
);
3488 new_args
= IPA_EDGE_REF (dst
);
3490 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3491 if (old_args
->polymorphic_call_contexts
)
3492 new_args
->polymorphic_call_contexts
3493 = vec_safe_copy (old_args
->polymorphic_call_contexts
);
3495 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3497 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3498 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3500 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3502 if (src_jf
->type
== IPA_JF_CONST
)
3504 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3507 dst_jf
->value
.constant
.rdesc
= NULL
;
3508 else if (src
->caller
== dst
->caller
)
3510 struct ipa_ref
*ref
;
3511 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3512 gcc_checking_assert (n
);
3513 ref
= src
->caller
->find_reference (n
, src
->call_stmt
,
3515 gcc_checking_assert (ref
);
3516 dst
->caller
->clone_reference (ref
, ref
->stmt
);
3518 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3519 dst_rdesc
->cs
= dst
;
3520 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3521 dst_rdesc
->next_duplicate
= NULL
;
3522 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3524 else if (src_rdesc
->cs
== src
)
3526 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3527 dst_rdesc
->cs
= dst
;
3528 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3529 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3530 src_rdesc
->next_duplicate
= dst_rdesc
;
3531 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3535 struct ipa_cst_ref_desc
*dst_rdesc
;
3536 /* This can happen during inlining, when a JFUNC can refer to a
3537 reference taken in a function up in the tree of inline clones.
3538 We need to find the duplicate that refers to our tree of
3541 gcc_assert (dst
->caller
->global
.inlined_to
);
3542 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3544 dst_rdesc
= dst_rdesc
->next_duplicate
)
3546 struct cgraph_node
*top
;
3547 top
= dst_rdesc
->cs
->caller
->global
.inlined_to
3548 ? dst_rdesc
->cs
->caller
->global
.inlined_to
3549 : dst_rdesc
->cs
->caller
;
3550 if (dst
->caller
->global
.inlined_to
== top
)
3553 gcc_assert (dst_rdesc
);
3554 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3557 else if (dst_jf
->type
== IPA_JF_PASS_THROUGH
3558 && src
->caller
== dst
->caller
)
3560 struct cgraph_node
*inline_root
= dst
->caller
->global
.inlined_to
3561 ? dst
->caller
->global
.inlined_to
: dst
->caller
;
3562 struct ipa_node_params
*root_info
= IPA_NODE_REF (inline_root
);
3563 int idx
= ipa_get_jf_pass_through_formal_id (dst_jf
);
3565 int c
= ipa_get_controlled_uses (root_info
, idx
);
3566 if (c
!= IPA_UNDESCRIBED_USE
)
3569 ipa_set_controlled_uses (root_info
, idx
, c
);
3575 /* Analyze newly added function into callgraph. */
3578 ipa_add_new_function (cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3580 if (node
->has_gimple_body_p ())
3581 ipa_analyze_node (node
);
3584 /* Hook that is called by summary when a node is duplicated. */
3587 ipa_node_params_t::duplicate(cgraph_node
*src
, cgraph_node
*dst
,
3588 ipa_node_params
*old_info
,
3589 ipa_node_params
*new_info
)
3591 ipa_agg_replacement_value
*old_av
, *new_av
;
3593 new_info
->descriptors
= old_info
->descriptors
.copy ();
3594 new_info
->lattices
= NULL
;
3595 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3597 new_info
->analysis_done
= old_info
->analysis_done
;
3598 new_info
->node_enqueued
= old_info
->node_enqueued
;
3600 old_av
= ipa_get_agg_replacements_for_node (src
);
3606 struct ipa_agg_replacement_value
*v
;
3608 v
= ggc_alloc
<ipa_agg_replacement_value
> ();
3609 memcpy (v
, old_av
, sizeof (*v
));
3612 old_av
= old_av
->next
;
3614 ipa_set_node_agg_value_chain (dst
, new_av
);
3617 ipcp_transformation_summary
*src_trans
= ipcp_get_transformation_summary (src
);
3619 if (src_trans
&& vec_safe_length (src_trans
->alignments
) > 0)
3621 ipcp_grow_transformations_if_necessary ();
3622 src_trans
= ipcp_get_transformation_summary (src
);
3623 const vec
<ipa_alignment
, va_gc
> *src_alignments
= src_trans
->alignments
;
3624 vec
<ipa_alignment
, va_gc
> *&dst_alignments
3625 = ipcp_get_transformation_summary (dst
)->alignments
;
3626 vec_safe_reserve_exact (dst_alignments
, src_alignments
->length ());
3627 for (unsigned i
= 0; i
< src_alignments
->length (); ++i
)
3628 dst_alignments
->quick_push ((*src_alignments
)[i
]);
3632 /* Register our cgraph hooks if they are not already there. */
3635 ipa_register_cgraph_hooks (void)
3637 ipa_check_create_node_params ();
3639 if (!edge_removal_hook_holder
)
3640 edge_removal_hook_holder
=
3641 symtab
->add_edge_removal_hook (&ipa_edge_removal_hook
, NULL
);
3642 if (!edge_duplication_hook_holder
)
3643 edge_duplication_hook_holder
=
3644 symtab
->add_edge_duplication_hook (&ipa_edge_duplication_hook
, NULL
);
3645 function_insertion_hook_holder
=
3646 symtab
->add_cgraph_insertion_hook (&ipa_add_new_function
, NULL
);
3649 /* Unregister our cgraph hooks if they are not already there. */
3652 ipa_unregister_cgraph_hooks (void)
3654 symtab
->remove_edge_removal_hook (edge_removal_hook_holder
);
3655 edge_removal_hook_holder
= NULL
;
3656 symtab
->remove_edge_duplication_hook (edge_duplication_hook_holder
);
3657 edge_duplication_hook_holder
= NULL
;
3658 symtab
->remove_cgraph_insertion_hook (function_insertion_hook_holder
);
3659 function_insertion_hook_holder
= NULL
;
3662 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3663 longer needed after ipa-cp. */
3666 ipa_free_all_structures_after_ipa_cp (void)
3668 if (!optimize
&& !in_lto_p
)
3670 ipa_free_all_edge_args ();
3671 ipa_free_all_node_params ();
3672 free_alloc_pool (ipcp_sources_pool
);
3673 free_alloc_pool (ipcp_cst_values_pool
);
3674 free_alloc_pool (ipcp_poly_ctx_values_pool
);
3675 free_alloc_pool (ipcp_agg_lattice_pool
);
3676 ipa_unregister_cgraph_hooks ();
3677 ipa_refdesc_pool
.release ();
3681 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3682 longer needed after indirect inlining. */
3685 ipa_free_all_structures_after_iinln (void)
3687 ipa_free_all_edge_args ();
3688 ipa_free_all_node_params ();
3689 ipa_unregister_cgraph_hooks ();
3690 if (ipcp_sources_pool
)
3691 free_alloc_pool (ipcp_sources_pool
);
3692 if (ipcp_cst_values_pool
)
3693 free_alloc_pool (ipcp_cst_values_pool
);
3694 if (ipcp_poly_ctx_values_pool
)
3695 free_alloc_pool (ipcp_poly_ctx_values_pool
);
3696 if (ipcp_agg_lattice_pool
)
3697 free_alloc_pool (ipcp_agg_lattice_pool
);
3698 ipa_refdesc_pool
.release ();
3701 /* Print ipa_tree_map data structures of all functions in the
3705 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
3708 struct ipa_node_params
*info
;
3710 if (!node
->definition
)
3712 info
= IPA_NODE_REF (node
);
3713 fprintf (f
, " function %s/%i parameter descriptors:\n",
3714 node
->name (), node
->order
);
3715 count
= ipa_get_param_count (info
);
3716 for (i
= 0; i
< count
; i
++)
3721 ipa_dump_param (f
, info
, i
);
3722 if (ipa_is_param_used (info
, i
))
3723 fprintf (f
, " used");
3724 c
= ipa_get_controlled_uses (info
, i
);
3725 if (c
== IPA_UNDESCRIBED_USE
)
3726 fprintf (f
, " undescribed_use");
3728 fprintf (f
, " controlled_uses=%i", c
);
3733 /* Print ipa_tree_map data structures of all functions in the
3737 ipa_print_all_params (FILE * f
)
3739 struct cgraph_node
*node
;
3741 fprintf (f
, "\nFunction parameters:\n");
3742 FOR_EACH_FUNCTION (node
)
3743 ipa_print_node_params (f
, node
);
3746 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3749 ipa_get_vector_of_formal_parms (tree fndecl
)
3755 gcc_assert (!flag_wpa
);
3756 count
= count_formal_params (fndecl
);
3757 args
.create (count
);
3758 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
3759 args
.quick_push (parm
);
3764 /* Return a heap allocated vector containing types of formal parameters of
3765 function type FNTYPE. */
3768 ipa_get_vector_of_formal_parm_types (tree fntype
)
3774 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3777 types
.create (count
);
3778 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3779 types
.quick_push (TREE_VALUE (t
));
3784 /* Modify the function declaration FNDECL and its type according to the plan in
3785 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3786 to reflect the actual parameters being modified which are determined by the
3787 base_index field. */
3790 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
)
3792 vec
<tree
> oparms
= ipa_get_vector_of_formal_parms (fndecl
);
3793 tree orig_type
= TREE_TYPE (fndecl
);
3794 tree old_arg_types
= TYPE_ARG_TYPES (orig_type
);
3796 /* The following test is an ugly hack, some functions simply don't have any
3797 arguments in their type. This is probably a bug but well... */
3798 bool care_for_types
= (old_arg_types
!= NULL_TREE
);
3799 bool last_parm_void
;
3803 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
3805 otypes
= ipa_get_vector_of_formal_parm_types (orig_type
);
3807 gcc_assert (oparms
.length () + 1 == otypes
.length ());
3809 gcc_assert (oparms
.length () == otypes
.length ());
3813 last_parm_void
= false;
3817 int len
= adjustments
.length ();
3818 tree
*link
= &DECL_ARGUMENTS (fndecl
);
3819 tree new_arg_types
= NULL
;
3820 for (int i
= 0; i
< len
; i
++)
3822 struct ipa_parm_adjustment
*adj
;
3825 adj
= &adjustments
[i
];
3827 if (adj
->op
== IPA_PARM_OP_NEW
)
3830 parm
= oparms
[adj
->base_index
];
3833 if (adj
->op
== IPA_PARM_OP_COPY
)
3836 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3839 link
= &DECL_CHAIN (parm
);
3841 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3847 ptype
= build_pointer_type (adj
->type
);
3851 if (is_gimple_reg_type (ptype
))
3853 unsigned malign
= GET_MODE_ALIGNMENT (TYPE_MODE (ptype
));
3854 if (TYPE_ALIGN (ptype
) < malign
)
3855 ptype
= build_aligned_type (ptype
, malign
);
3860 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3862 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3864 const char *prefix
= adj
->arg_prefix
? adj
->arg_prefix
: "SYNTH";
3865 DECL_NAME (new_parm
) = create_tmp_var_name (prefix
);
3866 DECL_ARTIFICIAL (new_parm
) = 1;
3867 DECL_ARG_TYPE (new_parm
) = ptype
;
3868 DECL_CONTEXT (new_parm
) = fndecl
;
3869 TREE_USED (new_parm
) = 1;
3870 DECL_IGNORED_P (new_parm
) = 1;
3871 layout_decl (new_parm
, 0);
3873 if (adj
->op
== IPA_PARM_OP_NEW
)
3877 adj
->new_decl
= new_parm
;
3880 link
= &DECL_CHAIN (new_parm
);
3886 tree new_reversed
= NULL
;
3889 new_reversed
= nreverse (new_arg_types
);
3893 TREE_CHAIN (new_arg_types
) = void_list_node
;
3895 new_reversed
= void_list_node
;
3899 /* Use copy_node to preserve as much as possible from original type
3900 (debug info, attribute lists etc.)
3901 Exception is METHOD_TYPEs must have THIS argument.
3902 When we are asked to remove it, we need to build new FUNCTION_TYPE
3904 tree new_type
= NULL
;
3905 if (TREE_CODE (orig_type
) != METHOD_TYPE
3906 || (adjustments
[0].op
== IPA_PARM_OP_COPY
3907 && adjustments
[0].base_index
== 0))
3909 new_type
= build_distinct_type_copy (orig_type
);
3910 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3915 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3917 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3918 DECL_VINDEX (fndecl
) = NULL_TREE
;
3921 /* When signature changes, we need to clear builtin info. */
3922 if (DECL_BUILT_IN (fndecl
))
3924 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3925 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3928 TREE_TYPE (fndecl
) = new_type
;
3929 DECL_VIRTUAL_P (fndecl
) = 0;
3930 DECL_LANG_SPECIFIC (fndecl
) = NULL
;
3935 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3936 If this is a directly recursive call, CS must be NULL. Otherwise it must
3937 contain the corresponding call graph edge. */
3940 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gcall
*stmt
,
3941 ipa_parm_adjustment_vec adjustments
)
3943 struct cgraph_node
*current_node
= cgraph_node::get (current_function_decl
);
3945 vec
<tree
, va_gc
> **debug_args
= NULL
;
3947 gimple_stmt_iterator gsi
, prev_gsi
;
3951 len
= adjustments
.length ();
3953 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->decl
;
3954 current_node
->remove_stmt_references (stmt
);
3956 gsi
= gsi_for_stmt (stmt
);
3958 gsi_prev (&prev_gsi
);
3959 for (i
= 0; i
< len
; i
++)
3961 struct ipa_parm_adjustment
*adj
;
3963 adj
= &adjustments
[i
];
3965 if (adj
->op
== IPA_PARM_OP_COPY
)
3967 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
3969 vargs
.quick_push (arg
);
3971 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3973 tree expr
, base
, off
;
3975 unsigned int deref_align
= 0;
3976 bool deref_base
= false;
3978 /* We create a new parameter out of the value of the old one, we can
3979 do the following kind of transformations:
3981 - A scalar passed by reference is converted to a scalar passed by
3982 value. (adj->by_ref is false and the type of the original
3983 actual argument is a pointer to a scalar).
3985 - A part of an aggregate is passed instead of the whole aggregate.
3986 The part can be passed either by value or by reference, this is
3987 determined by value of adj->by_ref. Moreover, the code below
3988 handles both situations when the original aggregate is passed by
3989 value (its type is not a pointer) and when it is passed by
3990 reference (it is a pointer to an aggregate).
3992 When the new argument is passed by reference (adj->by_ref is true)
3993 it must be a part of an aggregate and therefore we form it by
3994 simply taking the address of a reference inside the original
3997 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
3998 base
= gimple_call_arg (stmt
, adj
->base_index
);
3999 loc
= DECL_P (base
) ? DECL_SOURCE_LOCATION (base
)
4000 : EXPR_LOCATION (base
);
4002 if (TREE_CODE (base
) != ADDR_EXPR
4003 && POINTER_TYPE_P (TREE_TYPE (base
)))
4004 off
= build_int_cst (adj
->alias_ptr_type
,
4005 adj
->offset
/ BITS_PER_UNIT
);
4008 HOST_WIDE_INT base_offset
;
4012 if (TREE_CODE (base
) == ADDR_EXPR
)
4014 base
= TREE_OPERAND (base
, 0);
4020 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
4021 /* Aggregate arguments can have non-invariant addresses. */
4024 base
= build_fold_addr_expr (prev_base
);
4025 off
= build_int_cst (adj
->alias_ptr_type
,
4026 adj
->offset
/ BITS_PER_UNIT
);
4028 else if (TREE_CODE (base
) == MEM_REF
)
4033 deref_align
= TYPE_ALIGN (TREE_TYPE (base
));
4035 off
= build_int_cst (adj
->alias_ptr_type
,
4037 + adj
->offset
/ BITS_PER_UNIT
);
4038 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
4040 base
= TREE_OPERAND (base
, 0);
4044 off
= build_int_cst (adj
->alias_ptr_type
,
4046 + adj
->offset
/ BITS_PER_UNIT
);
4047 base
= build_fold_addr_expr (base
);
4053 tree type
= adj
->type
;
4055 unsigned HOST_WIDE_INT misalign
;
4059 align
= deref_align
;
4064 get_pointer_alignment_1 (base
, &align
, &misalign
);
4065 if (TYPE_ALIGN (type
) > align
)
4066 align
= TYPE_ALIGN (type
);
4068 misalign
+= (offset_int::from (off
, SIGNED
).to_short_addr ()
4070 misalign
= misalign
& (align
- 1);
4072 align
= (misalign
& -misalign
);
4073 if (align
< TYPE_ALIGN (type
))
4074 type
= build_aligned_type (type
, align
);
4075 base
= force_gimple_operand_gsi (&gsi
, base
,
4076 true, NULL
, true, GSI_SAME_STMT
);
4077 expr
= fold_build2_loc (loc
, MEM_REF
, type
, base
, off
);
4078 /* If expr is not a valid gimple call argument emit
4079 a load into a temporary. */
4080 if (is_gimple_reg_type (TREE_TYPE (expr
)))
4082 gimple tem
= gimple_build_assign (NULL_TREE
, expr
);
4083 if (gimple_in_ssa_p (cfun
))
4085 gimple_set_vuse (tem
, gimple_vuse (stmt
));
4086 expr
= make_ssa_name (TREE_TYPE (expr
), tem
);
4089 expr
= create_tmp_reg (TREE_TYPE (expr
));
4090 gimple_assign_set_lhs (tem
, expr
);
4091 gsi_insert_before (&gsi
, tem
, GSI_SAME_STMT
);
4096 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
4097 expr
= build_fold_addr_expr (expr
);
4098 expr
= force_gimple_operand_gsi (&gsi
, expr
,
4099 true, NULL
, true, GSI_SAME_STMT
);
4101 vargs
.quick_push (expr
);
4103 if (adj
->op
!= IPA_PARM_OP_COPY
&& MAY_HAVE_DEBUG_STMTS
)
4106 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
4109 arg
= gimple_call_arg (stmt
, adj
->base_index
);
4110 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
4112 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
4114 arg
= fold_convert_loc (gimple_location (stmt
),
4115 TREE_TYPE (origin
), arg
);
4117 if (debug_args
== NULL
)
4118 debug_args
= decl_debug_args_insert (callee_decl
);
4119 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
); ix
+= 2)
4120 if (ddecl
== origin
)
4122 ddecl
= (**debug_args
)[ix
+ 1];
4127 ddecl
= make_node (DEBUG_EXPR_DECL
);
4128 DECL_ARTIFICIAL (ddecl
) = 1;
4129 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
4130 DECL_MODE (ddecl
) = DECL_MODE (origin
);
4132 vec_safe_push (*debug_args
, origin
);
4133 vec_safe_push (*debug_args
, ddecl
);
4135 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
), stmt
);
4136 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
4140 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4142 fprintf (dump_file
, "replacing stmt:");
4143 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
4146 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
4148 if (gimple_call_lhs (stmt
))
4149 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
4151 gimple_set_block (new_stmt
, gimple_block (stmt
));
4152 if (gimple_has_location (stmt
))
4153 gimple_set_location (new_stmt
, gimple_location (stmt
));
4154 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
4155 gimple_call_copy_flags (new_stmt
, stmt
);
4156 if (gimple_in_ssa_p (cfun
))
4158 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
4159 if (gimple_vdef (stmt
))
4161 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
4162 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
4166 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4168 fprintf (dump_file
, "with stmt:");
4169 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
4170 fprintf (dump_file
, "\n");
4172 gsi_replace (&gsi
, new_stmt
, true);
4174 cs
->set_call_stmt (new_stmt
);
4177 current_node
->record_stmt_references (gsi_stmt (gsi
));
4180 while (gsi_stmt (gsi
) != gsi_stmt (prev_gsi
));
4183 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4184 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4185 specifies whether the function should care about type incompatibility the
4186 current and new expressions. If it is false, the function will leave
4187 incompatibility issues to the caller. Return true iff the expression
4191 ipa_modify_expr (tree
*expr
, bool convert
,
4192 ipa_parm_adjustment_vec adjustments
)
4194 struct ipa_parm_adjustment
*cand
4195 = ipa_get_adjustment_candidate (&expr
, &convert
, adjustments
, false);
4201 src
= build_simple_mem_ref (cand
->new_decl
);
4203 src
= cand
->new_decl
;
4205 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4207 fprintf (dump_file
, "About to replace expr ");
4208 print_generic_expr (dump_file
, *expr
, 0);
4209 fprintf (dump_file
, " with ");
4210 print_generic_expr (dump_file
, src
, 0);
4211 fprintf (dump_file
, "\n");
4214 if (convert
&& !useless_type_conversion_p (TREE_TYPE (*expr
), cand
->type
))
4216 tree vce
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (*expr
), src
);
4224 /* If T is an SSA_NAME, return NULL if it is not a default def or
4225 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4226 the base variable is always returned, regardless if it is a default
4227 def. Return T if it is not an SSA_NAME. */
4230 get_ssa_base_param (tree t
, bool ignore_default_def
)
4232 if (TREE_CODE (t
) == SSA_NAME
)
4234 if (ignore_default_def
|| SSA_NAME_IS_DEFAULT_DEF (t
))
4235 return SSA_NAME_VAR (t
);
4242 /* Given an expression, return an adjustment entry specifying the
4243 transformation to be done on EXPR. If no suitable adjustment entry
4244 was found, returns NULL.
4246 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4247 default def, otherwise bail on them.
4249 If CONVERT is non-NULL, this function will set *CONVERT if the
4250 expression provided is a component reference. ADJUSTMENTS is the
4251 adjustments vector. */
4253 ipa_parm_adjustment
*
4254 ipa_get_adjustment_candidate (tree
**expr
, bool *convert
,
4255 ipa_parm_adjustment_vec adjustments
,
4256 bool ignore_default_def
)
4258 if (TREE_CODE (**expr
) == BIT_FIELD_REF
4259 || TREE_CODE (**expr
) == IMAGPART_EXPR
4260 || TREE_CODE (**expr
) == REALPART_EXPR
)
4262 *expr
= &TREE_OPERAND (**expr
, 0);
4267 HOST_WIDE_INT offset
, size
, max_size
;
4268 tree base
= get_ref_base_and_extent (**expr
, &offset
, &size
, &max_size
);
4269 if (!base
|| size
== -1 || max_size
== -1)
4272 if (TREE_CODE (base
) == MEM_REF
)
4274 offset
+= mem_ref_offset (base
).to_short_addr () * BITS_PER_UNIT
;
4275 base
= TREE_OPERAND (base
, 0);
4278 base
= get_ssa_base_param (base
, ignore_default_def
);
4279 if (!base
|| TREE_CODE (base
) != PARM_DECL
)
4282 struct ipa_parm_adjustment
*cand
= NULL
;
4283 unsigned int len
= adjustments
.length ();
4284 for (unsigned i
= 0; i
< len
; i
++)
4286 struct ipa_parm_adjustment
*adj
= &adjustments
[i
];
4288 if (adj
->base
== base
4289 && (adj
->offset
== offset
|| adj
->op
== IPA_PARM_OP_REMOVE
))
4296 if (!cand
|| cand
->op
== IPA_PARM_OP_COPY
|| cand
->op
== IPA_PARM_OP_REMOVE
)
4301 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4304 index_in_adjustments_multiple_times_p (int base_index
,
4305 ipa_parm_adjustment_vec adjustments
)
4307 int i
, len
= adjustments
.length ();
4310 for (i
= 0; i
< len
; i
++)
4312 struct ipa_parm_adjustment
*adj
;
4313 adj
= &adjustments
[i
];
4315 if (adj
->base_index
== base_index
)
4327 /* Return adjustments that should have the same effect on function parameters
4328 and call arguments as if they were first changed according to adjustments in
4329 INNER and then by adjustments in OUTER. */
4331 ipa_parm_adjustment_vec
4332 ipa_combine_adjustments (ipa_parm_adjustment_vec inner
,
4333 ipa_parm_adjustment_vec outer
)
4335 int i
, outlen
= outer
.length ();
4336 int inlen
= inner
.length ();
4338 ipa_parm_adjustment_vec adjustments
, tmp
;
4341 for (i
= 0; i
< inlen
; i
++)
4343 struct ipa_parm_adjustment
*n
;
4346 if (n
->op
== IPA_PARM_OP_REMOVE
)
4350 /* FIXME: Handling of new arguments are not implemented yet. */
4351 gcc_assert (n
->op
!= IPA_PARM_OP_NEW
);
4352 tmp
.quick_push (*n
);
4356 adjustments
.create (outlen
+ removals
);
4357 for (i
= 0; i
< outlen
; i
++)
4359 struct ipa_parm_adjustment r
;
4360 struct ipa_parm_adjustment
*out
= &outer
[i
];
4361 struct ipa_parm_adjustment
*in
= &tmp
[out
->base_index
];
4363 memset (&r
, 0, sizeof (r
));
4364 gcc_assert (in
->op
!= IPA_PARM_OP_REMOVE
);
4365 if (out
->op
== IPA_PARM_OP_REMOVE
)
4367 if (!index_in_adjustments_multiple_times_p (in
->base_index
, tmp
))
4369 r
.op
= IPA_PARM_OP_REMOVE
;
4370 adjustments
.quick_push (r
);
4376 /* FIXME: Handling of new arguments are not implemented yet. */
4377 gcc_assert (out
->op
!= IPA_PARM_OP_NEW
);
4380 r
.base_index
= in
->base_index
;
4383 /* FIXME: Create nonlocal value too. */
4385 if (in
->op
== IPA_PARM_OP_COPY
&& out
->op
== IPA_PARM_OP_COPY
)
4386 r
.op
= IPA_PARM_OP_COPY
;
4387 else if (in
->op
== IPA_PARM_OP_COPY
)
4388 r
.offset
= out
->offset
;
4389 else if (out
->op
== IPA_PARM_OP_COPY
)
4390 r
.offset
= in
->offset
;
4392 r
.offset
= in
->offset
+ out
->offset
;
4393 adjustments
.quick_push (r
);
4396 for (i
= 0; i
< inlen
; i
++)
4398 struct ipa_parm_adjustment
*n
= &inner
[i
];
4400 if (n
->op
== IPA_PARM_OP_REMOVE
)
4401 adjustments
.quick_push (*n
);
4408 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4409 friendly way, assuming they are meant to be applied to FNDECL. */
4412 ipa_dump_param_adjustments (FILE *file
, ipa_parm_adjustment_vec adjustments
,
4415 int i
, len
= adjustments
.length ();
4417 vec
<tree
> parms
= ipa_get_vector_of_formal_parms (fndecl
);
4419 fprintf (file
, "IPA param adjustments: ");
4420 for (i
= 0; i
< len
; i
++)
4422 struct ipa_parm_adjustment
*adj
;
4423 adj
= &adjustments
[i
];
4426 fprintf (file
, " ");
4430 fprintf (file
, "%i. base_index: %i - ", i
, adj
->base_index
);
4431 print_generic_expr (file
, parms
[adj
->base_index
], 0);
4434 fprintf (file
, ", base: ");
4435 print_generic_expr (file
, adj
->base
, 0);
4439 fprintf (file
, ", new_decl: ");
4440 print_generic_expr (file
, adj
->new_decl
, 0);
4442 if (adj
->new_ssa_base
)
4444 fprintf (file
, ", new_ssa_base: ");
4445 print_generic_expr (file
, adj
->new_ssa_base
, 0);
4448 if (adj
->op
== IPA_PARM_OP_COPY
)
4449 fprintf (file
, ", copy_param");
4450 else if (adj
->op
== IPA_PARM_OP_REMOVE
)
4451 fprintf (file
, ", remove_param");
4453 fprintf (file
, ", offset %li", (long) adj
->offset
);
4455 fprintf (file
, ", by_ref");
4456 print_node_brief (file
, ", type: ", adj
->type
, 0);
4457 fprintf (file
, "\n");
4462 /* Dump the AV linked list. */
4465 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
4468 fprintf (f
, " Aggregate replacements:");
4469 for (; av
; av
= av
->next
)
4471 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
4472 av
->index
, av
->offset
);
4473 print_generic_expr (f
, av
->value
, 0);
4479 /* Stream out jump function JUMP_FUNC to OB. */
4482 ipa_write_jump_function (struct output_block
*ob
,
4483 struct ipa_jump_func
*jump_func
)
4485 struct ipa_agg_jf_item
*item
;
4486 struct bitpack_d bp
;
4489 streamer_write_uhwi (ob
, jump_func
->type
);
4490 switch (jump_func
->type
)
4492 case IPA_JF_UNKNOWN
:
4496 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4497 stream_write_tree (ob
, jump_func
->value
.constant
.value
, true);
4499 case IPA_JF_PASS_THROUGH
:
4500 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4501 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4503 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4504 bp
= bitpack_create (ob
->main_stream
);
4505 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4506 streamer_write_bitpack (&bp
);
4510 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4511 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4514 case IPA_JF_ANCESTOR
:
4515 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4516 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4517 bp
= bitpack_create (ob
->main_stream
);
4518 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4519 streamer_write_bitpack (&bp
);
4523 count
= vec_safe_length (jump_func
->agg
.items
);
4524 streamer_write_uhwi (ob
, count
);
4527 bp
= bitpack_create (ob
->main_stream
);
4528 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4529 streamer_write_bitpack (&bp
);
4532 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4534 streamer_write_uhwi (ob
, item
->offset
);
4535 stream_write_tree (ob
, item
->value
, true);
4538 bp
= bitpack_create (ob
->main_stream
);
4539 bp_pack_value (&bp
, jump_func
->alignment
.known
, 1);
4540 streamer_write_bitpack (&bp
);
4541 if (jump_func
->alignment
.known
)
4543 streamer_write_uhwi (ob
, jump_func
->alignment
.align
);
4544 streamer_write_uhwi (ob
, jump_func
->alignment
.misalign
);
4548 /* Read in jump function JUMP_FUNC from IB. */
4551 ipa_read_jump_function (struct lto_input_block
*ib
,
4552 struct ipa_jump_func
*jump_func
,
4553 struct cgraph_edge
*cs
,
4554 struct data_in
*data_in
)
4556 enum jump_func_type jftype
;
4557 enum tree_code operation
;
4560 jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
4563 case IPA_JF_UNKNOWN
:
4564 ipa_set_jf_unknown (jump_func
);
4567 ipa_set_jf_constant (jump_func
, stream_read_tree (ib
, data_in
), cs
);
4569 case IPA_JF_PASS_THROUGH
:
4570 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4571 if (operation
== NOP_EXPR
)
4573 int formal_id
= streamer_read_uhwi (ib
);
4574 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4575 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4576 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
);
4580 tree operand
= stream_read_tree (ib
, data_in
);
4581 int formal_id
= streamer_read_uhwi (ib
);
4582 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4586 case IPA_JF_ANCESTOR
:
4588 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4589 int formal_id
= streamer_read_uhwi (ib
);
4590 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4591 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4592 ipa_set_ancestor_jf (jump_func
, offset
, formal_id
, agg_preserved
);
4597 count
= streamer_read_uhwi (ib
);
4598 vec_alloc (jump_func
->agg
.items
, count
);
4601 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4602 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4604 for (i
= 0; i
< count
; i
++)
4606 struct ipa_agg_jf_item item
;
4607 item
.offset
= streamer_read_uhwi (ib
);
4608 item
.value
= stream_read_tree (ib
, data_in
);
4609 jump_func
->agg
.items
->quick_push (item
);
4612 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4613 bool alignment_known
= bp_unpack_value (&bp
, 1);
4614 if (alignment_known
)
4616 jump_func
->alignment
.known
= true;
4617 jump_func
->alignment
.align
= streamer_read_uhwi (ib
);
4618 jump_func
->alignment
.misalign
= streamer_read_uhwi (ib
);
4621 jump_func
->alignment
.known
= false;
4624 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4625 relevant to indirect inlining to OB. */
4628 ipa_write_indirect_edge_info (struct output_block
*ob
,
4629 struct cgraph_edge
*cs
)
4631 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4632 struct bitpack_d bp
;
4634 streamer_write_hwi (ob
, ii
->param_index
);
4635 bp
= bitpack_create (ob
->main_stream
);
4636 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4637 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4638 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4639 bp_pack_value (&bp
, ii
->by_ref
, 1);
4640 bp_pack_value (&bp
, ii
->vptr_changed
, 1);
4641 streamer_write_bitpack (&bp
);
4642 if (ii
->agg_contents
|| ii
->polymorphic
)
4643 streamer_write_hwi (ob
, ii
->offset
);
4645 gcc_assert (ii
->offset
== 0);
4647 if (ii
->polymorphic
)
4649 streamer_write_hwi (ob
, ii
->otr_token
);
4650 stream_write_tree (ob
, ii
->otr_type
, true);
4651 ii
->context
.stream_out (ob
);
4655 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4656 relevant to indirect inlining from IB. */
4659 ipa_read_indirect_edge_info (struct lto_input_block
*ib
,
4660 struct data_in
*data_in
,
4661 struct cgraph_edge
*cs
)
4663 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4664 struct bitpack_d bp
;
4666 ii
->param_index
= (int) streamer_read_hwi (ib
);
4667 bp
= streamer_read_bitpack (ib
);
4668 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4669 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4670 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4671 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4672 ii
->vptr_changed
= bp_unpack_value (&bp
, 1);
4673 if (ii
->agg_contents
|| ii
->polymorphic
)
4674 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4677 if (ii
->polymorphic
)
4679 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4680 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4681 ii
->context
.stream_in (ib
, data_in
);
4685 /* Stream out NODE info to OB. */
4688 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4691 lto_symtab_encoder_t encoder
;
4692 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4694 struct cgraph_edge
*e
;
4695 struct bitpack_d bp
;
4697 encoder
= ob
->decl_state
->symtab_node_encoder
;
4698 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4699 streamer_write_uhwi (ob
, node_ref
);
4701 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
4702 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4703 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
4704 bp
= bitpack_create (ob
->main_stream
);
4705 gcc_assert (info
->analysis_done
4706 || ipa_get_param_count (info
) == 0);
4707 gcc_assert (!info
->node_enqueued
);
4708 gcc_assert (!info
->ipcp_orig_node
);
4709 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4710 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
4711 streamer_write_bitpack (&bp
);
4712 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4713 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
4714 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4716 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4718 streamer_write_uhwi (ob
,
4719 ipa_get_cs_argument_count (args
) * 2
4720 + (args
->polymorphic_call_contexts
!= NULL
));
4721 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4723 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4724 if (args
->polymorphic_call_contexts
!= NULL
)
4725 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
4728 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4730 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4732 streamer_write_uhwi (ob
,
4733 ipa_get_cs_argument_count (args
) * 2
4734 + (args
->polymorphic_call_contexts
!= NULL
));
4735 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4737 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4738 if (args
->polymorphic_call_contexts
!= NULL
)
4739 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
4741 ipa_write_indirect_edge_info (ob
, e
);
/* Stream in NODE info from IB.  Counterpart of ipa_write_node_info: reads
   the parameter count and per-parameter move costs, the used/controlled-uses
   flags, and then one record per call-graph edge (direct callees first, then
   indirect calls) containing the jump functions and, optionally, the
   polymorphic call contexts.  DATA_IN supplies the tree/string tables.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  /* The stream begins with the parameter count, which sizes the descriptor
     vector; one move cost per parameter follows.  */
  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  /* A node with zero parameters was streamed without any analysis having
     been done, so do not claim otherwise.  */
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      /* The writer packs 2 * argument-count + have-contexts-flag into a
	 single uhwi; the low bit says whether a polymorphic call context
	 follows each jump function.  */
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      /* Same encoding as for direct callees above.  */
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
      /* Indirect edges additionally carry target-discovery info.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  The section starts with the number
   of nodes that will be streamed, followed by one ipa_write_node_info record
   per analyzed function in the partition.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream if IPA analysis never ran.  */
  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  /* NOTE(review): this assignment sits in a gap of the mangled source and was
     restored from the surrounding upstream code — verify against trunk.  */
  ob->symbol = NULL;
  /* First pass: count the nodes we are going to write, so readers know how
     many records follow.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  /* Terminate the stream and emit the section.  */
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  Decodes the
   lto_function_header layout (cfg part, main part, string table), then reads
   the node count and one ipa_read_node_info record per node.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      /* Each record starts with the encoder index of its node.  */
      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
4897 /* Read ipcp jump functions. */
4900 ipa_prop_read_jump_functions (void)
4902 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4903 struct lto_file_decl_data
*file_data
;
4906 ipa_check_create_node_params ();
4907 ipa_check_create_edge_args ();
4908 ipa_register_cgraph_hooks ();
4910 while ((file_data
= file_data_vec
[j
++]))
4913 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
4916 ipa_prop_read_section (file_data
, data
, len
);
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  /* Re-create the node and edge summaries lazily; both calls are no-ops if
     the summaries already exist.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
/* Stream out the IPA-CP transformation summary for NODE to OB: first the
   chain of aggregate replacement values, then (if present) the known
   parameter alignments.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* Emit the chain length first so the reader can loop.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  /* Alignment and misalignment are only meaningful — and only
	     streamed — when the alignment is known.  */
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    /* No alignment info: write a zero count so the reader skips it.  */
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.
   Counterpart of write_ipcp_transformation_info: rebuilds the aggregate
   replacement chain (in reverse stream order, via head insertion) and then
   the parameter alignment vector, if one was streamed.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      /* Prepend to the chain; order is not significant to consumers.  */
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Alignment records follow; a zero count means none were streamed.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  /* align/misalign were only written for known alignments.  */
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}
/* Write all aggregate replacement for nodes in set.  Mirrors
   ipa_prop_write_jump_functions: count first, then one
   write_ipcp_transformation_info record per function with a gimple body.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  /* NOTE(review): this assignment sits in a gap of the mangled source and was
     restored from the surrounding upstream code — verify against trunk.  */
  ob->symbol = NULL;
  /* First pass: count records so the reader knows how many follow.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  /* Terminate the stream and emit the section.  */
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
5075 /* Read replacements section in file FILE_DATA of length LEN with data
5079 read_replacements_section (struct lto_file_decl_data
*file_data
,
5083 const struct lto_function_header
*header
=
5084 (const struct lto_function_header
*) data
;
5085 const int cfg_offset
= sizeof (struct lto_function_header
);
5086 const int main_offset
= cfg_offset
+ header
->cfg_size
;
5087 const int string_offset
= main_offset
+ header
->main_size
;
5088 struct data_in
*data_in
;
5092 lto_input_block
ib_main ((const char *) data
+ main_offset
,
5093 header
->main_size
, file_data
->mode_table
);
5095 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
5096 header
->string_size
, vNULL
);
5097 count
= streamer_read_uhwi (&ib_main
);
5099 for (i
= 0; i
< count
; i
++)
5102 struct cgraph_node
*node
;
5103 lto_symtab_encoder_t encoder
;
5105 index
= streamer_read_uhwi (&ib_main
);
5106 encoder
= file_data
->symtab_node_encoder
;
5107 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
5109 gcc_assert (node
->definition
);
5110 read_ipcp_transformation_info (&ib_main
, node
, data_in
);
5112 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
5114 lto_data_in_delete (data_in
);
/* Read IPA-CP aggregate replacements.  Walks every LTO file, fetches its
   ipcp-transform section (if present) and hands it to
   read_replacements_section.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      /* A file may simply lack the section; that is not an error.  */
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped
   in NODE, i.e. remap each value's parameter index from the original
   function's numbering to the clone's numbering after argument removal.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  /* Nothing to remap if the clone did not drop any arguments.  */
  if (!node->clone.combined_args_to_skip)
    return;

  /* Find the highest parameter index referenced, to size the map.  */
  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  /* Build old-index -> new-index map; D counts skipped parameters so far,
     and a skipped slot gets -1 (it must not be referenced afterwards).  */
  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
/* Dominator walker driving the ipcp modification phase.  Carries the
   function-body analysis context, the parameter descriptors and the
   aggregate replacement chain, plus two out-flags reporting whether any
   statement, and possibly the CFG, was changed.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI/DESCS/AV provide the analysis context; SC and CC point to the
     caller's something-changed and cfg-changed flags.  */
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;        /* Function body analysis context.  */
  vec<ipa_param_descriptor> m_descriptors;  /* Parameter descriptors.  */
  struct ipa_agg_replacement_value *m_aggval;  /* Known aggregate values.  */
  bool *m_something_changed, *m_cfg_changed;   /* Out-flags for the caller.  */
};
5194 ipcp_modif_dom_walker::before_dom_children (basic_block bb
)
5196 gimple_stmt_iterator gsi
;
5197 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5199 struct ipa_agg_replacement_value
*v
;
5200 gimple stmt
= gsi_stmt (gsi
);
5202 HOST_WIDE_INT offset
, size
;
5206 if (!gimple_assign_load_p (stmt
))
5208 rhs
= gimple_assign_rhs1 (stmt
);
5209 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
5214 while (handled_component_p (t
))
5216 /* V_C_E can do things like convert an array of integers to one
5217 bigger integer and similar things we do not handle below. */
5218 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
)
5223 t
= TREE_OPERAND (t
, 0);
5228 if (!ipa_load_from_parm_agg_1 (m_fbi
, m_descriptors
, stmt
, rhs
, &index
,
5229 &offset
, &size
, &by_ref
))
5231 for (v
= m_aggval
; v
; v
= v
->next
)
5232 if (v
->index
== index
5233 && v
->offset
== offset
)
5236 || v
->by_ref
!= by_ref
5237 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v
->value
))) != size
)
5240 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
5241 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
5243 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
5244 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
5245 else if (TYPE_SIZE (TREE_TYPE (rhs
))
5246 == TYPE_SIZE (TREE_TYPE (v
->value
)))
5247 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
5252 fprintf (dump_file
, " const ");
5253 print_generic_expr (dump_file
, v
->value
, 0);
5254 fprintf (dump_file
, " can't be converted to type of ");
5255 print_generic_expr (dump_file
, rhs
, 0);
5256 fprintf (dump_file
, "\n");
5264 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5266 fprintf (dump_file
, "Modifying stmt:\n ");
5267 print_gimple_stmt (dump_file
, stmt
, 0, 0);
5269 gimple_assign_set_rhs_from_tree (&gsi
, val
);
5272 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5274 fprintf (dump_file
, "into:\n ");
5275 print_gimple_stmt (dump_file
, stmt
, 0, 0);
5276 fprintf (dump_file
, "\n");
5279 *m_something_changed
= true;
5280 if (maybe_clean_eh_stmt (stmt
)
5281 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
5282 *m_cfg_changed
= true;
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  For each surviving pointer parameter with a
   known alignment, raise the alignment recorded on its default-def SSA name
   unless an equal or stronger alignment is already known.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      /* Indices refer to the original signature; parameters the clone
	 dropped have no PARM_DECL to advance over.  */
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      /* Alignment lives on the SSA default definition; a parameter that is
	 never read has none.  */
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

      /* Never weaken alignment information we already have.  */
      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  Applies
   the recorded parameter alignments and aggregate replacement values to the
   body of NODE (which must be the current function), then clears the
   consumed transformation data.  Returns a TODO mask for the pass manager:
   0 when nothing changed, plus TODO_cleanup_cfg when EH purging changed
   the CFG.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  /* Without aggregate replacements (or parameters) there is nothing left
     to do after the alignment update.  */
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  /* The dominator walk performs the actual statement rewriting.  */
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  /* The transformation data has been consumed; drop it.  */
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}