/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
30 #include "fold-const.h"
31 #include "internal-fn.h"
32 #include "gimple-fold.h"
35 #include "insn-config.h"
44 #include "stor-layout.h"
45 #include "print-tree.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "langhooks.h"
53 #include "alloc-pool.h"
54 #include "symbol-summary.h"
57 #include "tree-into-ssa.h"
59 #include "tree-pass.h"
60 #include "tree-inline.h"
61 #include "ipa-inline.h"
62 #include "diagnostic.h"
63 #include "gimple-pretty-print.h"
64 #include "tree-streamer.h"
66 #include "ipa-utils.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
83 /* Description of a reference to an IPA constant. */
84 struct ipa_cst_ref_desc
86 /* Edge that corresponds to the statement which took the reference. */
87 struct cgraph_edge
*cs
;
88 /* Linked list of duplicates created when call graph edges are cloned. */
89 struct ipa_cst_ref_desc
*next_duplicate
;
90 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
95 /* Allocation pool for reference descriptions. */
97 static pool_allocator
<ipa_cst_ref_desc
> ipa_refdesc_pool
98 ("IPA-PROP ref descriptions", 32);
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
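  /* E.g. a function declared with __attribute__((optimize(0))) has optimize == 0
     in its per-function options, so IPA-CP does not analyze it.  */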
/* Return index of the formal whose tree is PTREE in the function which
   corresponds to DESCRIPTORS.  */

ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
/* Return index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
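/* As a hypothetical example, for a function declared as
   'int foo (int a, int b)', passing the PARM_DECL of 'b' here yields index 1,
   while a declaration that is not among the formals yields -1.  */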
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
/* Return how many formal parameters FNDECL has.  */

count_formal_params (tree fndecl)
  gcc_assert (gimple_has_body_p (fndecl));
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
/* Return the declaration of Ith formal parameter of the function corresponding
   to INFO.  Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
      print_generic_expr (file, info->descriptors[i].decl, 0);
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

ipa_alloc_node_params (struct cgraph_node *node, int param_count)
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

ipa_initialize_node_params (struct cgraph_node *node)
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
/* Print the jump functions associated with call graph edge CS to file F.  */

ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
      else if (type == IPA_JF_PASS_THROUGH)
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
      else if (type == IPA_JF_ANCESTOR)
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");

      if (jump_func->agg.items)
          struct ipa_agg_jf_item *item;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);

      struct ipa_polymorphic_call_context *ctx
        = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
          fprintf (f, "         Context: ");
          ctx->dump (dump_file);

      if (jump_func->alignment.known)
          fprintf (f, "         Alignment: %u, misalignment: %u\n",
                   jump_func->alignment.align,
                   jump_func->alignment.misalign);
        fprintf (f, "         Unknown alignment\n");
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
  for (cs = node->callees; cs; cs = cs->next_callee)
      if (!ipa_edge_args_info_available_for_edge_p (cs))
      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup_for_dump (node->name ()), node->order,
               xstrdup_for_dump (cs->callee->name ()),
      ipa_print_node_jump_functions_for_edge (f, cs);

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

ipa_print_all_jump_functions (FILE *f)
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
      ipa_print_node_jump_functions (f, node);
/* Set JFUNC to be a jump function that knows nothing about the value.  */

ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
/* Set JFUNC to be a constant jump function.  */

ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->next_duplicate = NULL;
      jfunc->value.constant.rdesc = rdesc;
    jfunc->value.constant.rdesc = NULL;
/* Set JFUNC to be a simple pass-through jump function.  */

ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
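/* For instance, in a caller 'int foo (int a) { return bar (a); }' the
   argument of the call to bar is described by such a simple pass-through
   jump function with formal_id 0 (illustrative sketch).  */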
/* Set JFUNC to be an arithmetic pass through jump function.  */

ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
/* Set JFUNC to be an ancestor jump function.  */

ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
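
/* As an illustration of the three phases above, a constructor of a class B
   derived from A behaves roughly like this hypothetical sketch:

     B::B (B *this)
     {
       A::A (this);                  // 1) construct ancestor sub-objects
       this->vptr = &vtable_for_B;   // 2) store the new VMT pointers
       this->member = 0;             // 3) user code, may call virtuals
     }
*/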
stmt_may_be_vtbl_ptr_store (gimple stmt)
  if (is_gimple_call (stmt))
  if (gimple_clobber_p (stmt))
  else if (is_gimple_assign (stmt))
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
      tci->type_maybe_changed = true;
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

param_type_may_change_p (tree function, tree arg, gimple call)
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
  struct prop_type_change_info tci;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))

  ao_ref_init (&ao, arg);
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)

  ipa_set_jf_unknown (jfunc);
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
  if (!flag_devirtualize)

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),

  return detect_type_change_from_memory_writes (arg, base, comp_type,
						 call, jfunc, offset);
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))

  if (!param_type_may_change_p (current_function_decl, arg, call))

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
  bool *b = (bool *) data;
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

aa_overwalked (struct ipa_func_body_info *fbi)
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
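/* The limit checked above corresponds to the user-visible
   --param ipa-max-aa-steps knob.  */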
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
  struct ipa_param_aa_status *paa;
  bool modified = false;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
      if (aa_overwalked (fbi))
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
      fbi->aa_walked += walked;
	paa->parm_modified = true;
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
  if (!gimple_assign_single_p (stmt))

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)

  index = ipa_get_param_decl_index_1 (descriptors, op1);
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple stmt, tree ref)
  struct ipa_param_aa_status *paa;
  bool modified = false;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
      if (aa_overwalked (fbi))
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
      fbi->aa_walked += walked;
	paa->ref_modified = true;
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple call, tree parm)
  bool modified = false;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
  if (paa->pt_modified)

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
  fbi->aa_walked += walked;
    paa->pt_modified = true;
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)

  int index = ipa_get_param_decl_index_1 (descriptors, base);
      && parm_preserved_before_stmt_p (fbi, index, stmt, op))

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	   void (*<T2e4>) (struct S *) D.1867;
	   D.1867_2 = p.1_1->f;
      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

   2) The passed value can be described by a simple arithmetic pass-through
   jump function, e.g.

     D.2064_4 = a.1(D) + 4;

   This case can also occur in combination with the previous one, e.g.:

     D.2064_4 = a.0_3 + 4;

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)

       D.1845_2 = &this_1(D)->D.1748;

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);

      tree op2 = gimple_assign_rhs2 (stmt);
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
      else if (gimple_assign_single_p (stmt))
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);

  if (TREE_CODE (op1) != ADDR_EXPR)
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size != size)
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
  expr = TREE_OPERAND (expr, 0);

  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */

  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

       iftmp.1_3 = &obj_2(D)->D.1762;

     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));

compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;

  if (gimple_phi_num_args (phi) != 2)

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
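
/* For reference, the usual C++ ABI layout that this matches is roughly
   (illustrative sketch):

     struct member_fn_ptr {
       void (*__pfn) ();     // function pointer, or vtable offset (plus 1)
       ptrdiff_t __delta;    // adjustment added to the 'this' pointer
     };
*/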
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
  if (TREE_CODE (type) != RECORD_TYPE)

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))

  if (DECL_CHAIN (fld))
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

get_ssa_def_if_simple_copy (tree rhs)
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
/* Simple linked list, describing known contents of an aggregate before

struct ipa_known_agg_contents_list
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;

/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such an
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
      if ((*p)->offset + (*p)->size > lhs_offset)
  if (*p && (*p)->offset < lhs_offset + lhs_size)
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	  *already_there = true;
	/* Otherwise this is a partial overlap which we cannot
1389 CONST_COUNT constant entries there and that th offset of the passed argument
1390 is ARG_OFFSET and store it into JFUNC. */
1393 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list
*list
,
1394 int const_count
, HOST_WIDE_INT arg_offset
,
1395 struct ipa_jump_func
*jfunc
)
1397 vec_alloc (jfunc
->agg
.items
, const_count
);
1402 struct ipa_agg_jf_item item
;
1403 item
.offset
= list
->offset
- arg_offset
;
1404 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1405 item
.value
= unshare_expr_without_location (list
->constant
);
1406 jfunc
->agg
.items
->quick_push (item
);
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 struct ipa_jump_func *jfunc)
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  bool check_ref, by_ref;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
  if (POINTER_TYPE_P (arg_type))
      if (TREE_CODE (arg) == SSA_NAME)
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
      else if (TREE_CODE (arg) == ADDR_EXPR)
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	  if (DECL_P (arg_base))
	      ao_ref_init (&r, arg_base);
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
      if (arg_max_size == -1
	  || arg_max_size != arg_size
      ao_ref_init (&r, arg);

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
  gsi = gsi_for_stmt (call);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
      if (!gimple_assign_single_p (stmt))

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)

	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
      else if (lhs_base != arg_base)
	  if (DECL_P (lhs_base))

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	  n->constant = NULL_TREE;

      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
      return TREE_VALUE (t);
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
      return TREE_TYPE (t);
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))

  for (n = 0; n < arg_num; n++)
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	  unsigned HOST_WIDE_INT hwi_bitpos;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    gcc_assert (!jfunc->alignment.known);
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
      else if (TREE_CODE (arg) == SSA_NAME)
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    as_a <gphi *> (stmt));

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 the aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);

  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
      struct cgraph_node *callee = cs->callee;
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
      ipa_compute_jump_functions_for_edge (fbi, cs);
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
  ref_offset = TREE_OPERAND (rhs, 1);

    *offset_p = int_bit_position (fld);

  if (integer_nonzerop (ref_offset))
      return ref_field == fld ? rec : NULL_TREE;
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1811 ipa_is_ssa_with_stmt_def (tree t
)
1813 if (TREE_CODE (t
) == SSA_NAME
1814 && !SSA_NAME_IS_DEFAULT_DEF (t
))
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

     f$__delta_5 = MEM[(struct  *)&f];
     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;

     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
       MyString S ("somestring");

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
	ipa_note_param_call (fbi->node, index, call);

  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;

  /* Now we need to try to match the complex pattern of calling a member
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
      def = SSA_NAME_DEF_STMT (cond);

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),

  index = ipa_get_param_decl_index (info, rec);
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the

ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)

  if (TREE_CODE (obj) != SSA_NAME)

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)

      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
  tree target = gimple_call_fn (call);

      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)

  if (cs->indirect_info->polymorphic && flag_devirtualize)
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of the caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple stmt)
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
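/* Illustrative sketch only, not part of GCC: a parameter use is "controlled"
   when it appears only as an argument of calls, so IPA-CP can account for
   every place the value flows to.  In the made-up example below, P has one
   controlled use; storing P to memory instead would make the count
   IPA_UNDESCRIBED_USE.  */
#if 0
extern void consume (void *p);

static void
wrapper (void *p)
{
  consume (p);	/* the only use of P, and it is a call argument */
}
#endif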
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
2333 /* Update the jump functions associated with call graph edge E when the call
2334 graph edge CS is being inlined, assuming that E->caller is already (possibly
2335 indirectly) inlined into CS->callee and that E has not been inlined. */
2338 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2339 struct cgraph_edge
*e
)
2341 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2342 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2343 int count
= ipa_get_cs_argument_count (args
);
2346 for (i
= 0; i
< count
; i
++)
2348 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2349 struct ipa_polymorphic_call_context
*dst_ctx
2350 = ipa_get_ith_polymorhic_call_context (args
, i
);
2352 if (dst
->type
== IPA_JF_ANCESTOR
)
2354 struct ipa_jump_func
*src
;
2355 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2356 struct ipa_polymorphic_call_context
*src_ctx
2357 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2359 /* Variable number of arguments can cause havoc if we try to access
2360 one that does not exist in the inlined edge. So make sure we
2362 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2364 ipa_set_jf_unknown (dst
);
2368 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2370 if (src_ctx
&& !src_ctx
->useless_p ())
2372 struct ipa_polymorphic_call_context ctx
= *src_ctx
;
2374 /* TODO: Make type preserved safe WRT contexts. */
2375 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2376 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2377 ctx
.offset_by (dst
->value
.ancestor
.offset
);
2378 if (!ctx
.useless_p ())
2380 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2382 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2384 dst_ctx
->combine_with (ctx
);
2388 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2390 struct ipa_agg_jf_item
*item
;
2393 /* Currently we do not produce clobber aggregate jump functions,
2394 replace with merging when we do. */
2395 gcc_assert (!dst
->agg
.items
);
2397 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2398 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2399 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2400 item
->offset
-= dst
->value
.ancestor
.offset
;
2403 if (src
->type
== IPA_JF_PASS_THROUGH
2404 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2406 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2407 dst
->value
.ancestor
.agg_preserved
&=
2408 src
->value
.pass_through
.agg_preserved
;
2410 else if (src
->type
== IPA_JF_ANCESTOR
)
2412 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2413 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2414 dst
->value
.ancestor
.agg_preserved
&=
2415 src
->value
.ancestor
.agg_preserved
;
2418 ipa_set_jf_unknown (dst
);
2420 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2422 struct ipa_jump_func
*src
;
2423 /* We must check range due to calls with variable number of arguments
2424 and we cannot combine jump functions with operations. */
2425 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2426 && (dst
->value
.pass_through
.formal_id
2427 < ipa_get_cs_argument_count (top
)))
2429 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2430 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2431 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2432 struct ipa_polymorphic_call_context
*src_ctx
2433 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2435 if (src_ctx
&& !src_ctx
->useless_p ())
2437 struct ipa_polymorphic_call_context ctx
= *src_ctx
;
2439 /* TODO: Make type preserved safe WRT contexts. */
2440 if (!ipa_get_jf_pass_through_type_preserved (dst
))
2441 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2442 if (!ctx
.useless_p ())
2446 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2448 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2450 dst_ctx
->combine_with (ctx
);
2455 case IPA_JF_UNKNOWN
:
2456 ipa_set_jf_unknown (dst
);
2459 ipa_set_jf_cst_copy (dst
, src
);
2462 case IPA_JF_PASS_THROUGH
:
2464 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2465 enum tree_code operation
;
2466 operation
= ipa_get_jf_pass_through_operation (src
);
2468 if (operation
== NOP_EXPR
)
2472 && ipa_get_jf_pass_through_agg_preserved (src
);
2473 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
2477 tree operand
= ipa_get_jf_pass_through_operand (src
);
2478 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2483 case IPA_JF_ANCESTOR
:
2487 && ipa_get_jf_ancestor_agg_preserved (src
);
2488 ipa_set_ancestor_jf (dst
,
2489 ipa_get_jf_ancestor_offset (src
),
2490 ipa_get_jf_ancestor_formal_id (src
),
2499 && (dst_agg_p
|| !src
->agg
.by_ref
))
2501 /* Currently we do not produce clobber aggregate jump
2502 functions, replace with merging when we do. */
2503 gcc_assert (!dst
->agg
.items
);
2505 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2506 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2510 ipa_set_jf_unknown (dst
);
2515 /* If TARGET is an addr_expr of a function declaration, make it the
2516 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2517 Otherwise, return NULL. */
2519 struct cgraph_edge
*
2520 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
2523 struct cgraph_node
*callee
;
2524 struct inline_edge_summary
*es
= inline_edge_summary (ie
);
2525 bool unreachable
= false;
2527 if (TREE_CODE (target
) == ADDR_EXPR
)
2528 target
= TREE_OPERAND (target
, 0);
2529 if (TREE_CODE (target
) != FUNCTION_DECL
)
2531 target
= canonicalize_constructor_val (target
, NULL
);
2532 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2534 /* Member pointer call that goes through a VMT lookup. */
2535 if (ie
->indirect_info
->member_ptr
2536 /* Or if target is not an invariant expression and we do not
2537 know if it will evaulate to function at runtime.
2538 This can happen when folding through &VAR, where &VAR
2539 is IP invariant, but VAR itself is not.
2541 TODO: Revisit this when GCC 5 is branched. It seems that
2542 member_ptr check is not needed and that we may try to fold
2543 the expression and see if VAR is readonly. */
2544 || !is_gimple_ip_invariant (target
))
2546 if (dump_enabled_p ())
2548 location_t loc
= gimple_location_safe (ie
->call_stmt
);
2549 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2550 "discovered direct call non-invariant "
2552 ie
->caller
->name (), ie
->caller
->order
);
2558 if (dump_enabled_p ())
2560 location_t loc
= gimple_location_safe (ie
->call_stmt
);
2561 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2562 "discovered direct call to non-function in %s/%i, "
2563 "making it __builtin_unreachable\n",
2564 ie
->caller
->name (), ie
->caller
->order
);
2567 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2568 callee
= cgraph_node::get_create (target
);
2572 callee
= cgraph_node::get (target
);
2575 callee
= cgraph_node::get (target
);
2577 /* Because may-edges are not explicitely represented and vtable may be external,
2578 we may create the first reference to the object in the unit. */
2579 if (!callee
|| callee
->global
.inlined_to
)
2582 /* We are better to ensure we can refer to it.
2583 In the case of static functions we are out of luck, since we already
2584 removed its body. In the case of public functions we may or may
2585 not introduce the reference. */
2586 if (!canonicalize_constructor_val (target
, NULL
)
2587 || !TREE_PUBLIC (target
))
2590 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2591 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2592 xstrdup_for_dump (ie
->caller
->name ()),
2594 xstrdup_for_dump (ie
->callee
->name ()),
2598 callee
= cgraph_node::get_create (target
);
2601 /* If the edge is already speculated. */
2602 if (speculative
&& ie
->speculative
)
2604 struct cgraph_edge
*e2
;
2605 struct ipa_ref
*ref
;
2606 ie
->speculative_call_info (e2
, ie
, ref
);
2607 if (e2
->callee
->ultimate_alias_target ()
2608 != callee
->ultimate_alias_target ())
2611 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative target "
2612 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2613 xstrdup_for_dump (ie
->caller
->name ()),
2615 xstrdup_for_dump (callee
->name ()),
2617 xstrdup_for_dump (e2
->callee
->name ()),
2623 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative target "
2624 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2625 xstrdup_for_dump (ie
->caller
->name ()),
2627 xstrdup_for_dump (callee
->name ()),
2633 if (!dbg_cnt (devirt
))
2636 ipa_check_create_node_params ();
2638 /* We can not make edges to inline clones. It is bug that someone removed
2639 the cgraph node too early. */
2640 gcc_assert (!callee
->global
.inlined_to
);
2642 if (dump_file
&& !unreachable
)
2644 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
2645 "(%s/%i -> %s/%i), for stmt ",
2646 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2647 speculative
? "speculative" : "known",
2648 xstrdup_for_dump (ie
->caller
->name ()),
2650 xstrdup_for_dump (callee
->name ()),
2653 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2655 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2657 if (dump_enabled_p ())
2659 location_t loc
= gimple_location_safe (ie
->call_stmt
);
2661 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2662 "converting indirect call in %s to direct call to %s\n",
2663 ie
->caller
->name (), callee
->name ());
2667 struct cgraph_edge
*orig
= ie
;
2668 ie
= ie
->make_direct (callee
);
2669 /* If we resolved speculative edge the cost is already up to date
2670 for direct call (adjusted by inline_edge_duplication_hook). */
2673 es
= inline_edge_summary (ie
);
2674 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2675 - eni_size_weights
.call_cost
);
2676 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2677 - eni_time_weights
.call_cost
);
2682 if (!callee
->can_be_discarded_p ())
2685 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
2689 /* make_speculative will update ie's cost to direct call cost. */
2690 ie
= ie
->make_speculative
2691 (callee
, ie
->count
* 8 / 10, ie
->frequency
* 8 / 10);
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
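/* Illustrative sketch only, not part of GCC: an aggregate jump function
   records constants known to sit at fixed offsets of an argument.  For a
   made-up caller like the one below, the store of do_work is such a known
   item, and ipa_find_agg_cst_for_param can later retrieve it when the callee
   is known to call through that field at the same offset.  */
#if 0
struct ops { int (*hook) (int); };

extern int dispatch (struct ops *o, int x);	/* calls o->hook (x) */
extern int do_work (int x);

static int
caller (int x)
{
  struct ops o;
  o.hook = do_work;	/* constant at the offset of the hook field */
  return dispatch (&o, x);
}
#endif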
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
2801 /* Try to find a destination for indirect edge IE that corresponds to a simple
2802 call or a call of a member function pointer and where the destination is a
2803 pointer formal parameter described by jump function JFUNC. If it can be
2804 determined, return the newly direct edge, otherwise return NULL.
2805 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2807 static struct cgraph_edge
*
2808 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
2809 struct ipa_jump_func
*jfunc
,
2810 struct ipa_node_params
*new_root_info
)
2812 struct cgraph_edge
*cs
;
2814 bool agg_contents
= ie
->indirect_info
->agg_contents
;
2816 if (ie
->indirect_info
->agg_contents
)
2817 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2818 ie
->indirect_info
->offset
,
2819 ie
->indirect_info
->by_ref
);
2821 target
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2824 cs
= ipa_make_edge_direct_to_target (ie
, target
);
2826 if (cs
&& !agg_contents
)
2829 gcc_checking_assert (cs
->callee
2831 || jfunc
->type
!= IPA_JF_CONST
2832 || !cgraph_node_for_jfunc (jfunc
)
2833 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
2834 ok
= try_decrement_rdesc_refcount (jfunc
);
2835 gcc_checking_assert (ok
);
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
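/* Illustrative note only, not part of GCC: a devirtualization target is
   "impossible" when type information proves that no valid method can be
   reached at the call site, so executing the call would be undefined
   behaviour.  The helper above therefore redirects such calls as if the
   source contained the statement below, which lets later passes remove the
   path entirely.  */
#if 0
__builtin_unreachable ();
#endif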
2864 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2865 call based on a formal parameter which is described by jump function JFUNC
2866 and if it can be determined, make it direct and return the direct edge.
2867 Otherwise, return NULL. CTX describes the polymorphic context that the
2868 parameter the call is based on brings along with it. */
2870 static struct cgraph_edge
*
2871 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
2872 struct ipa_jump_func
*jfunc
,
2873 struct ipa_polymorphic_call_context ctx
)
2876 bool speculative
= false;
2878 if (!opt_for_fn (ie
->caller
->decl
, flag_devirtualize
))
2881 gcc_assert (!ie
->indirect_info
->by_ref
);
2883 /* Try to do lookup via known virtual table pointer value. */
2884 if (!ie
->indirect_info
->vptr_changed
2885 || opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
))
2888 unsigned HOST_WIDE_INT offset
;
2889 tree t
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2890 ie
->indirect_info
->offset
,
2892 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
2894 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
2898 if ((TREE_CODE (TREE_TYPE (t
)) == FUNCTION_TYPE
2899 && DECL_FUNCTION_CODE (t
) == BUILT_IN_UNREACHABLE
)
2900 || !possible_polymorphic_call_target_p
2901 (ie
, cgraph_node::get (t
)))
2903 /* Do not speculate builtin_unreachable, it is stupid! */
2904 if (!ie
->indirect_info
->vptr_changed
)
2905 target
= ipa_impossible_devirt_target (ie
, target
);
2910 speculative
= ie
->indirect_info
->vptr_changed
;
2916 ipa_polymorphic_call_context
ie_context (ie
);
2917 vec
<cgraph_node
*>targets
;
2920 ctx
.offset_by (ie
->indirect_info
->offset
);
2921 if (ie
->indirect_info
->vptr_changed
)
2922 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
2923 ie
->indirect_info
->otr_type
);
2924 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
2925 targets
= possible_polymorphic_call_targets
2926 (ie
->indirect_info
->otr_type
,
2927 ie
->indirect_info
->otr_token
,
2929 if (final
&& targets
.length () <= 1)
2931 speculative
= false;
2932 if (targets
.length () == 1)
2933 target
= targets
[0]->decl
;
2935 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
2937 else if (!target
&& opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
)
2938 && !ie
->speculative
&& ie
->maybe_hot_p ())
2941 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
2942 ie
->indirect_info
->otr_token
,
2943 ie
->indirect_info
->context
);
2953 if (!possible_polymorphic_call_target_p
2954 (ie
, cgraph_node::get_create (target
)))
2958 target
= ipa_impossible_devirt_target (ie
, target
);
2960 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
2966 /* Update the param called notes associated with NODE when CS is being inlined,
2967 assuming NODE is (potentially indirectly) inlined into CS->callee.
2968 Moreover, if the callee is discovered to be constant, create a new cgraph
2969 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2970 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2973 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
2974 struct cgraph_node
*node
,
2975 vec
<cgraph_edge
*> *new_edges
)
2977 struct ipa_edge_args
*top
;
2978 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
2979 struct ipa_node_params
*new_root_info
;
2982 ipa_check_create_edge_args ();
2983 top
= IPA_EDGE_REF (cs
);
2984 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
2985 ? cs
->caller
->global
.inlined_to
2988 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
2990 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
2991 struct ipa_jump_func
*jfunc
;
2993 cgraph_node
*spec_target
= NULL
;
2995 next_ie
= ie
->next_callee
;
2997 if (ici
->param_index
== -1)
3000 /* We must check range due to calls with variable number of arguments: */
3001 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
3003 ici
->param_index
= -1;
3007 param_index
= ici
->param_index
;
3008 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
3010 if (ie
->speculative
)
3012 struct cgraph_edge
*de
;
3013 struct ipa_ref
*ref
;
3014 ie
->speculative_call_info (de
, ie
, ref
);
3015 spec_target
= de
->callee
;
3018 if (!opt_for_fn (node
->decl
, flag_indirect_inlining
))
3019 new_direct_edge
= NULL
;
3020 else if (ici
->polymorphic
)
3022 ipa_polymorphic_call_context ctx
;
3023 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
3024 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
);
3027 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
3029 /* If speculation was removed, then we need to do nothing. */
3030 if (new_direct_edge
&& new_direct_edge
!= ie
3031 && new_direct_edge
->callee
== spec_target
)
3033 new_direct_edge
->indirect_inlining_edge
= 1;
3034 top
= IPA_EDGE_REF (cs
);
3036 if (!new_direct_edge
->speculative
)
3039 else if (new_direct_edge
)
3041 new_direct_edge
->indirect_inlining_edge
= 1;
3042 if (new_direct_edge
->call_stmt
)
3043 new_direct_edge
->call_stmt_cannot_inline_p
3044 = !gimple_check_call_matching_types (
3045 new_direct_edge
->call_stmt
,
3046 new_direct_edge
->callee
->decl
, false);
3049 new_edges
->safe_push (new_direct_edge
);
3052 top
= IPA_EDGE_REF (cs
);
3053 /* If speculative edge was introduced we still need to update
3054 call info of the indirect edge. */
3055 if (!new_direct_edge
->speculative
)
3058 if (jfunc
->type
== IPA_JF_PASS_THROUGH
3059 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
3061 if (ici
->agg_contents
3062 && !ipa_get_jf_pass_through_agg_preserved (jfunc
)
3063 && !ici
->polymorphic
)
3064 ici
->param_index
= -1;
3067 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
3068 if (ici
->polymorphic
3069 && !ipa_get_jf_pass_through_type_preserved (jfunc
))
3070 ici
->vptr_changed
= true;
3073 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
3075 if (ici
->agg_contents
3076 && !ipa_get_jf_ancestor_agg_preserved (jfunc
)
3077 && !ici
->polymorphic
)
3078 ici
->param_index
= -1;
3081 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
3082 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
3083 if (ici
->polymorphic
3084 && !ipa_get_jf_ancestor_type_preserved (jfunc
))
3085 ici
->vptr_changed
= true;
3089 /* Either we can find a destination for this edge now or never. */
3090 ici
->param_index
= -1;
3096 /* Recursively traverse subtree of NODE (including node) made of inlined
3097 cgraph_edges when CS has been inlined and invoke
3098 update_indirect_edges_after_inlining on all nodes and
3099 update_jump_functions_after_inlining on all non-inlined edges that lead out
3100 of this subtree. Newly discovered indirect edges will be added to
3101 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3105 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
3106 struct cgraph_node
*node
,
3107 vec
<cgraph_edge
*> *new_edges
)
3109 struct cgraph_edge
*e
;
3112 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
3114 for (e
= node
->callees
; e
; e
= e
->next_callee
)
3115 if (!e
->inline_failed
)
3116 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
3118 update_jump_functions_after_inlining (cs
, e
);
3119 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3120 update_jump_functions_after_inlining (cs
, e
);
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
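/* Illustrative worked example only, not part of GCC: when inlining merges a
   count of C controlled uses in the new root with D uses recorded for the
   inlined callee, the call that was just inlined no longer exists, hence the
   combined value C + D - 1 above.  Combining 2 and 1 therefore yields 2,
   while any IPA_UNDESCRIBED_USE operand keeps the result undescribed.  */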
3136 /* Propagate number of controlled users from CS->caleee to the new root of the
3137 tree of inlined nodes. */
3140 propagate_controlled_uses (struct cgraph_edge
*cs
)
3142 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
3143 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
3144 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
3145 struct ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
3146 struct ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
3149 count
= MIN (ipa_get_cs_argument_count (args
),
3150 ipa_get_param_count (old_root_info
));
3151 for (i
= 0; i
< count
; i
++)
3153 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3154 struct ipa_cst_ref_desc
*rdesc
;
3156 if (jf
->type
== IPA_JF_PASS_THROUGH
)
3159 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
3160 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
3161 d
= ipa_get_controlled_uses (old_root_info
, i
);
3163 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
3164 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
3165 c
= combine_controlled_uses_counters (c
, d
);
3166 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
3167 if (c
== 0 && new_root_info
->ipcp_orig_node
)
3169 struct cgraph_node
*n
;
3170 struct ipa_ref
*ref
;
3171 tree t
= new_root_info
->known_csts
[src_idx
];
3173 if (t
&& TREE_CODE (t
) == ADDR_EXPR
3174 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
3175 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
3176 && (ref
= new_root
->find_reference (n
, NULL
, 0)))
3179 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
3180 "reference from %s/%i to %s/%i.\n",
3181 xstrdup_for_dump (new_root
->name ()),
3183 xstrdup_for_dump (n
->name ()), n
->order
);
3184 ref
->remove_reference ();
3188 else if (jf
->type
== IPA_JF_CONST
3189 && (rdesc
= jfunc_rdesc_usable (jf
)))
3191 int d
= ipa_get_controlled_uses (old_root_info
, i
);
3192 int c
= rdesc
->refcount
;
3193 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
3194 if (rdesc
->refcount
== 0)
3196 tree cst
= ipa_get_jf_constant (jf
);
3197 struct cgraph_node
*n
;
3198 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
3199 && TREE_CODE (TREE_OPERAND (cst
, 0))
3201 n
= cgraph_node::get (TREE_OPERAND (cst
, 0));
3204 struct cgraph_node
*clone
;
3206 ok
= remove_described_reference (n
, rdesc
);
3207 gcc_checking_assert (ok
);
3210 while (clone
->global
.inlined_to
3211 && clone
!= rdesc
->cs
->caller
3212 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
3214 struct ipa_ref
*ref
;
3215 ref
= clone
->find_reference (n
, NULL
, 0);
3219 fprintf (dump_file
, "ipa-prop: Removing "
3220 "cloning-created reference "
3221 "from %s/%i to %s/%i.\n",
3222 xstrdup_for_dump (clone
->name ()),
3224 xstrdup_for_dump (n
->name ()),
3226 ref
->remove_reference ();
3228 clone
= clone
->callers
->caller
;
3235 for (i
= ipa_get_param_count (old_root_info
);
3236 i
< ipa_get_cs_argument_count (args
);
3239 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3241 if (jf
->type
== IPA_JF_CONST
)
3243 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
3245 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
3247 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
3248 ipa_set_controlled_uses (new_root_info
,
3249 jf
->value
.pass_through
.formal_id
,
3250 IPA_UNDESCRIBED_USE
);
3254 /* Update jump functions and call note functions on inlining the call site CS.
3255 CS is expected to lead to a node already cloned by
3256 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3257 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3261 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
3262 vec
<cgraph_edge
*> *new_edges
)
3265 /* Do nothing if the preparation phase has not been carried out yet
3266 (i.e. during early inlining). */
3267 if (!ipa_node_params_sum
)
3269 gcc_assert (ipa_edge_args_vector
);
3271 propagate_controlled_uses (cs
);
3272 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_csts.release ();
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}
/* Grow ipcp_transformations if necessary.  */

static void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
3353 /* Hook that is called by cgraph.c when an edge is removed. */
3356 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
3358 struct ipa_edge_args
*args
;
3360 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3361 if (vec_safe_length (ipa_edge_args_vector
) <= (unsigned)cs
->uid
)
3364 args
= IPA_EDGE_REF (cs
);
3365 if (args
->jump_functions
)
3367 struct ipa_jump_func
*jf
;
3369 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3371 struct ipa_cst_ref_desc
*rdesc
;
3372 try_decrement_rdesc_refcount (jf
);
3373 if (jf
->type
== IPA_JF_CONST
3374 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
3380 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
3383 /* Hook that is called by cgraph.c when an edge is duplicated. */
3386 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
3389 struct ipa_edge_args
*old_args
, *new_args
;
3392 ipa_check_create_edge_args ();
3394 old_args
= IPA_EDGE_REF (src
);
3395 new_args
= IPA_EDGE_REF (dst
);
3397 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3398 if (old_args
->polymorphic_call_contexts
)
3399 new_args
->polymorphic_call_contexts
3400 = vec_safe_copy (old_args
->polymorphic_call_contexts
);
3402 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3404 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3405 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3407 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3409 if (src_jf
->type
== IPA_JF_CONST
)
3411 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3414 dst_jf
->value
.constant
.rdesc
= NULL
;
3415 else if (src
->caller
== dst
->caller
)
3417 struct ipa_ref
*ref
;
3418 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3419 gcc_checking_assert (n
);
3420 ref
= src
->caller
->find_reference (n
, src
->call_stmt
,
3422 gcc_checking_assert (ref
);
3423 dst
->caller
->clone_reference (ref
, ref
->stmt
);
3425 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3426 dst_rdesc
->cs
= dst
;
3427 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3428 dst_rdesc
->next_duplicate
= NULL
;
3429 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3431 else if (src_rdesc
->cs
== src
)
3433 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3434 dst_rdesc
->cs
= dst
;
3435 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3436 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3437 src_rdesc
->next_duplicate
= dst_rdesc
;
3438 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3442 struct ipa_cst_ref_desc
*dst_rdesc
;
3443 /* This can happen during inlining, when a JFUNC can refer to a
3444 reference taken in a function up in the tree of inline clones.
3445 We need to find the duplicate that refers to our tree of
3448 gcc_assert (dst
->caller
->global
.inlined_to
);
3449 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3451 dst_rdesc
= dst_rdesc
->next_duplicate
)
3453 struct cgraph_node
*top
;
3454 top
= dst_rdesc
->cs
->caller
->global
.inlined_to
3455 ? dst_rdesc
->cs
->caller
->global
.inlined_to
3456 : dst_rdesc
->cs
->caller
;
3457 if (dst
->caller
->global
.inlined_to
== top
)
3460 gcc_assert (dst_rdesc
);
3461 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3464 else if (dst_jf
->type
== IPA_JF_PASS_THROUGH
3465 && src
->caller
== dst
->caller
)
3467 struct cgraph_node
*inline_root
= dst
->caller
->global
.inlined_to
3468 ? dst
->caller
->global
.inlined_to
: dst
->caller
;
3469 struct ipa_node_params
*root_info
= IPA_NODE_REF (inline_root
);
3470 int idx
= ipa_get_jf_pass_through_formal_id (dst_jf
);
3472 int c
= ipa_get_controlled_uses (root_info
, idx
);
3473 if (c
!= IPA_UNDESCRIBED_USE
)
3476 ipa_set_controlled_uses (root_info
, idx
, c
);
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
3491 /* Hook that is called by summary when a node is duplicated. */
3494 ipa_node_params_t::duplicate(cgraph_node
*src
, cgraph_node
*dst
,
3495 ipa_node_params
*old_info
,
3496 ipa_node_params
*new_info
)
3498 ipa_agg_replacement_value
*old_av
, *new_av
;
3500 new_info
->descriptors
= old_info
->descriptors
.copy ();
3501 new_info
->lattices
= NULL
;
3502 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3504 new_info
->analysis_done
= old_info
->analysis_done
;
3505 new_info
->node_enqueued
= old_info
->node_enqueued
;
3507 old_av
= ipa_get_agg_replacements_for_node (src
);
3513 struct ipa_agg_replacement_value
*v
;
3515 v
= ggc_alloc
<ipa_agg_replacement_value
> ();
3516 memcpy (v
, old_av
, sizeof (*v
));
3519 old_av
= old_av
->next
;
3521 ipa_set_node_agg_value_chain (dst
, new_av
);
3524 ipcp_transformation_summary
*src_trans
= ipcp_get_transformation_summary (src
);
3526 if (src_trans
&& vec_safe_length (src_trans
->alignments
) > 0)
3528 ipcp_grow_transformations_if_necessary ();
3529 src_trans
= ipcp_get_transformation_summary (src
);
3530 const vec
<ipa_alignment
, va_gc
> *src_alignments
= src_trans
->alignments
;
3531 vec
<ipa_alignment
, va_gc
> *&dst_alignments
3532 = ipcp_get_transformation_summary (dst
)->alignments
;
3533 vec_safe_reserve_exact (dst_alignments
, src_alignments
->length ());
3534 for (unsigned i
= 0; i
< src_alignments
->length (); ++i
)
3535 dst_alignments
->quick_push ((*src_alignments
)[i
]);
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
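/* Illustrative usage sketch only, not part of GCC: both helpers above return
   heap-allocated vectors that the caller is responsible for releasing.
   FNDECL below stands for any function declaration in scope.  */
#if 0
{
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
  unsigned i;
  tree parm;

  FOR_EACH_VEC_ELT (parms, i, parm)
    debug_tree (parm);	/* any per-parameter processing */
  parms.release ();
}
#endif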
3687 /* Modify the function declaration FNDECL and its type according to the plan in
3688 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3689 to reflect the actual parameters being modified which are determined by the
3690 base_index field. */
3693 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
)
3695 vec
<tree
> oparms
= ipa_get_vector_of_formal_parms (fndecl
);
3696 tree orig_type
= TREE_TYPE (fndecl
);
3697 tree old_arg_types
= TYPE_ARG_TYPES (orig_type
);
3699 /* The following test is an ugly hack, some functions simply don't have any
3700 arguments in their type. This is probably a bug but well... */
3701 bool care_for_types
= (old_arg_types
!= NULL_TREE
);
3702 bool last_parm_void
;
3706 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
3708 otypes
= ipa_get_vector_of_formal_parm_types (orig_type
);
3710 gcc_assert (oparms
.length () + 1 == otypes
.length ());
3712 gcc_assert (oparms
.length () == otypes
.length ());
3716 last_parm_void
= false;
3720 int len
= adjustments
.length ();
3721 tree
*link
= &DECL_ARGUMENTS (fndecl
);
3722 tree new_arg_types
= NULL
;
3723 for (int i
= 0; i
< len
; i
++)
3725 struct ipa_parm_adjustment
*adj
;
3728 adj
= &adjustments
[i
];
3730 if (adj
->op
== IPA_PARM_OP_NEW
)
3733 parm
= oparms
[adj
->base_index
];
3736 if (adj
->op
== IPA_PARM_OP_COPY
)
3739 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3742 link
= &DECL_CHAIN (parm
);
3744 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3750 ptype
= build_pointer_type (adj
->type
);
3754 if (is_gimple_reg_type (ptype
))
3756 unsigned malign
= GET_MODE_ALIGNMENT (TYPE_MODE (ptype
));
3757 if (TYPE_ALIGN (ptype
) < malign
)
3758 ptype
= build_aligned_type (ptype
, malign
);
3763 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3765 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3767 const char *prefix
= adj
->arg_prefix
? adj
->arg_prefix
: "SYNTH";
3768 DECL_NAME (new_parm
) = create_tmp_var_name (prefix
);
3769 DECL_ARTIFICIAL (new_parm
) = 1;
3770 DECL_ARG_TYPE (new_parm
) = ptype
;
3771 DECL_CONTEXT (new_parm
) = fndecl
;
3772 TREE_USED (new_parm
) = 1;
3773 DECL_IGNORED_P (new_parm
) = 1;
3774 layout_decl (new_parm
, 0);
3776 if (adj
->op
== IPA_PARM_OP_NEW
)
3780 adj
->new_decl
= new_parm
;
3783 link
= &DECL_CHAIN (new_parm
);
3789 tree new_reversed
= NULL
;
3792 new_reversed
= nreverse (new_arg_types
);
3796 TREE_CHAIN (new_arg_types
) = void_list_node
;
3798 new_reversed
= void_list_node
;
3802 /* Use copy_node to preserve as much as possible from original type
3803 (debug info, attribute lists etc.)
3804 Exception is METHOD_TYPEs must have THIS argument.
3805 When we are asked to remove it, we need to build new FUNCTION_TYPE
3807 tree new_type
= NULL
;
3808 if (TREE_CODE (orig_type
) != METHOD_TYPE
3809 || (adjustments
[0].op
== IPA_PARM_OP_COPY
3810 && adjustments
[0].base_index
== 0))
3812 new_type
= build_distinct_type_copy (orig_type
);
3813 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3818 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3820 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3821 DECL_VINDEX (fndecl
) = NULL_TREE
;
3824 /* When signature changes, we need to clear builtin info. */
3825 if (DECL_BUILT_IN (fndecl
))
3827 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3828 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3831 TREE_TYPE (fndecl
) = new_type
;
3832 DECL_VIRTUAL_P (fndecl
) = 0;
3833 DECL_LANG_SPECIFIC (fndecl
) = NULL
;
3838 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3839 If this is a directly recursive call, CS must be NULL. Otherwise it must
3840 contain the corresponding call graph edge. */
3843 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gcall
*stmt
,
3844 ipa_parm_adjustment_vec adjustments
)
3846 struct cgraph_node
*current_node
= cgraph_node::get (current_function_decl
);
3848 vec
<tree
, va_gc
> **debug_args
= NULL
;
3850 gimple_stmt_iterator gsi
, prev_gsi
;
3854 len
= adjustments
.length ();
3856 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->decl
;
3857 current_node
->remove_stmt_references (stmt
);
3859 gsi
= gsi_for_stmt (stmt
);
3861 gsi_prev (&prev_gsi
);
3862 for (i
= 0; i
< len
; i
++)
3864 struct ipa_parm_adjustment
*adj
;
3866 adj
= &adjustments
[i
];
3868 if (adj
->op
== IPA_PARM_OP_COPY
)
3870 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
3872 vargs
.quick_push (arg
);
3874 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3876 tree expr
, base
, off
;
3878 unsigned int deref_align
= 0;
3879 bool deref_base
= false;
3881 /* We create a new parameter out of the value of the old one, we can
3882 do the following kind of transformations:
3884 - A scalar passed by reference is converted to a scalar passed by
3885 value. (adj->by_ref is false and the type of the original
3886 actual argument is a pointer to a scalar).
3888 - A part of an aggregate is passed instead of the whole aggregate.
3889 The part can be passed either by value or by reference, this is
3890 determined by value of adj->by_ref. Moreover, the code below
3891 handles both situations when the original aggregate is passed by
3892 value (its type is not a pointer) and when it is passed by
3893 reference (it is a pointer to an aggregate).
3895 When the new argument is passed by reference (adj->by_ref is true)
3896 it must be a part of an aggregate and therefore we form it by
3897 simply taking the address of a reference inside the original
3900 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
3901 base
= gimple_call_arg (stmt
, adj
->base_index
);
3902 loc
= DECL_P (base
) ? DECL_SOURCE_LOCATION (base
)
3903 : EXPR_LOCATION (base
);
3905 if (TREE_CODE (base
) != ADDR_EXPR
3906 && POINTER_TYPE_P (TREE_TYPE (base
)))
3907 off
= build_int_cst (adj
->alias_ptr_type
,
3908 adj
->offset
/ BITS_PER_UNIT
);
3911 HOST_WIDE_INT base_offset
;
3915 if (TREE_CODE (base
) == ADDR_EXPR
)
3917 base
= TREE_OPERAND (base
, 0);
3923 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
3924 /* Aggregate arguments can have non-invariant addresses. */
3927 base
= build_fold_addr_expr (prev_base
);
3928 off
= build_int_cst (adj
->alias_ptr_type
,
3929 adj
->offset
/ BITS_PER_UNIT
);
3931 else if (TREE_CODE (base
) == MEM_REF
)
3936 deref_align
= TYPE_ALIGN (TREE_TYPE (base
));
3938 off
= build_int_cst (adj
->alias_ptr_type
,
3940 + adj
->offset
/ BITS_PER_UNIT
);
3941 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
3943 base
= TREE_OPERAND (base
, 0);
3947 off
= build_int_cst (adj
->alias_ptr_type
,
3949 + adj
->offset
/ BITS_PER_UNIT
);
3950 base
= build_fold_addr_expr (base
);
3956 tree type
= adj
->type
;
3958 unsigned HOST_WIDE_INT misalign
;
3962 align
= deref_align
;
3967 get_pointer_alignment_1 (base
, &align
, &misalign
);
3968 if (TYPE_ALIGN (type
) > align
)
3969 align
= TYPE_ALIGN (type
);
3971 misalign
+= (offset_int::from (off
, SIGNED
).to_short_addr ()
3973 misalign
= misalign
& (align
- 1);
3975 align
= (misalign
& -misalign
);
3976 if (align
< TYPE_ALIGN (type
))
3977 type
= build_aligned_type (type
, align
);
3978 base
= force_gimple_operand_gsi (&gsi
, base
,
3979 true, NULL
, true, GSI_SAME_STMT
);
3980 expr
= fold_build2_loc (loc
, MEM_REF
, type
, base
, off
);
3981 /* If expr is not a valid gimple call argument emit
3982 a load into a temporary. */
3983 if (is_gimple_reg_type (TREE_TYPE (expr
)))
3985 gimple tem
= gimple_build_assign (NULL_TREE
, expr
);
3986 if (gimple_in_ssa_p (cfun
))
3988 gimple_set_vuse (tem
, gimple_vuse (stmt
));
3989 expr
= make_ssa_name (TREE_TYPE (expr
), tem
);
3992 expr
= create_tmp_reg (TREE_TYPE (expr
));
3993 gimple_assign_set_lhs (tem
, expr
);
3994 gsi_insert_before (&gsi
, tem
, GSI_SAME_STMT
);
3999 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
4000 expr
= build_fold_addr_expr (expr
);
4001 expr
= force_gimple_operand_gsi (&gsi
, expr
,
4002 true, NULL
, true, GSI_SAME_STMT
);
4004 vargs
.quick_push (expr
);
4006 if (adj
->op
!= IPA_PARM_OP_COPY
&& MAY_HAVE_DEBUG_STMTS
)
4009 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
4012 arg
= gimple_call_arg (stmt
, adj
->base_index
);
4013 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
4015 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
4017 arg
= fold_convert_loc (gimple_location (stmt
),
4018 TREE_TYPE (origin
), arg
);
4020 if (debug_args
== NULL
)
4021 debug_args
= decl_debug_args_insert (callee_decl
);
4022 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
); ix
+= 2)
4023 if (ddecl
== origin
)
4025 ddecl
= (**debug_args
)[ix
+ 1];
4030 ddecl
= make_node (DEBUG_EXPR_DECL
);
4031 DECL_ARTIFICIAL (ddecl
) = 1;
4032 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
4033 DECL_MODE (ddecl
) = DECL_MODE (origin
);
4035 vec_safe_push (*debug_args
, origin
);
4036 vec_safe_push (*debug_args
, ddecl
);
4038 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
), stmt
);
4039 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
4043 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4045 fprintf (dump_file
, "replacing stmt:");
4046 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
4049 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
4051 if (gimple_call_lhs (stmt
))
4052 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
4054 gimple_set_block (new_stmt
, gimple_block (stmt
));
4055 if (gimple_has_location (stmt
))
4056 gimple_set_location (new_stmt
, gimple_location (stmt
));
4057 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
4058 gimple_call_copy_flags (new_stmt
, stmt
);
4059 if (gimple_in_ssa_p (cfun
))
4061 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
4062 if (gimple_vdef (stmt
))
4064 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
4065 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
4069 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4071 fprintf (dump_file
, "with stmt:");
4072 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
4073 fprintf (dump_file
, "\n");
4075 gsi_replace (&gsi
, new_stmt
, true);
4077 cs
->set_call_stmt (new_stmt
);
4080 current_node
->record_stmt_references (gsi_stmt (gsi
));
4083 while (gsi_stmt (gsi
) != gsi_stmt (prev_gsi
));
4086 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4087 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4088 specifies whether the function should care about type incompatibility the
4089 current and new expressions. If it is false, the function will leave
4090 incompatibility issues to the caller. Return true iff the expression
4094 ipa_modify_expr (tree
*expr
, bool convert
,
4095 ipa_parm_adjustment_vec adjustments
)
4097 struct ipa_parm_adjustment
*cand
4098 = ipa_get_adjustment_candidate (&expr
, &convert
, adjustments
, false);
4104 src
= build_simple_mem_ref (cand
->new_decl
);
4106 src
= cand
->new_decl
;
4108 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4110 fprintf (dump_file
, "About to replace expr ");
4111 print_generic_expr (dump_file
, *expr
, 0);
4112 fprintf (dump_file
, " with ");
4113 print_generic_expr (dump_file
, src
, 0);
4114 fprintf (dump_file
, "\n");
4117 if (convert
&& !useless_type_conversion_p (TREE_TYPE (*expr
), cand
->type
))
4119 tree vce
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (*expr
), src
);
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
4145 /* Given an expression, return an adjustment entry specifying the
4146 transformation to be done on EXPR. If no suitable adjustment entry
4147 was found, returns NULL.
4149 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4150 default def, otherwise bail on them.
4152 If CONVERT is non-NULL, this function will set *CONVERT if the
4153 expression provided is a component reference. ADJUSTMENTS is the
4154 adjustments vector. */
4156 ipa_parm_adjustment
*
4157 ipa_get_adjustment_candidate (tree
**expr
, bool *convert
,
4158 ipa_parm_adjustment_vec adjustments
,
4159 bool ignore_default_def
)
4161 if (TREE_CODE (**expr
) == BIT_FIELD_REF
4162 || TREE_CODE (**expr
) == IMAGPART_EXPR
4163 || TREE_CODE (**expr
) == REALPART_EXPR
)
4165 *expr
= &TREE_OPERAND (**expr
, 0);
4170 HOST_WIDE_INT offset
, size
, max_size
;
4171 tree base
= get_ref_base_and_extent (**expr
, &offset
, &size
, &max_size
);
4172 if (!base
|| size
== -1 || max_size
== -1)
4175 if (TREE_CODE (base
) == MEM_REF
)
4177 offset
+= mem_ref_offset (base
).to_short_addr () * BITS_PER_UNIT
;
4178 base
= TREE_OPERAND (base
, 0);
4181 base
= get_ssa_base_param (base
, ignore_default_def
);
4182 if (!base
|| TREE_CODE (base
) != PARM_DECL
)
4185 struct ipa_parm_adjustment
*cand
= NULL
;
4186 unsigned int len
= adjustments
.length ();
4187 for (unsigned i
= 0; i
< len
; i
++)
4189 struct ipa_parm_adjustment
*adj
= &adjustments
[i
];
4191 if (adj
->base
== base
4192 && (adj
->offset
== offset
|| adj
->op
== IPA_PARM_OP_REMOVE
))
4199 if (!cand
|| cand
->op
== IPA_PARM_OP_COPY
|| cand
->op
== IPA_PARM_OP_REMOVE
)
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  ipa_parm_adjustment_vec adjustments, tmp;

  for (i = 0; i < inlen; i++)
      struct ipa_parm_adjustment *n;

      if (n->op == IPA_PARM_OP_REMOVE)

      /* FIXME: Handling of new arguments is not implemented yet.  */
      gcc_assert (n->op != IPA_PARM_OP_NEW);
      tmp.quick_push (*n);

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);

      /* FIXME: Handling of new arguments is not implemented yet.  */
      gcc_assert (out->op != IPA_PARM_OP_NEW);

      r.base_index = in->base_index;

      /* FIXME: Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);

  for (i = 0; i < inlen; i++)
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
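/* Illustrative sketch (not part of GCC): a hypothetical caller of
   ipa_combine_adjustments.  Assume an inner pass copied original parameters
   0 and 2 and removed parameter 1, and a later outer pass, working on the
   already-adjusted two-parameter list, removed its parameter 1 (the original
   parameter 2).  The combined vector then has the same overall effect as
   keeping original parameter 0 and removing parameters 1 and 2.  The helper
   name example_combine_adjustments is made up for illustration only.  */

static ipa_parm_adjustment_vec
example_combine_adjustments (void)
{
  ipa_parm_adjustment_vec inner = vNULL, outer = vNULL;
  struct ipa_parm_adjustment a;
  memset (&a, 0, sizeof (a));

  /* Inner pass: copy 0, remove 1, copy 2 (indices refer to the original
     parameter list).  */
  a.base_index = 0;
  a.op = IPA_PARM_OP_COPY;
  inner.safe_push (a);
  a.base_index = 1;
  a.op = IPA_PARM_OP_REMOVE;
  inner.safe_push (a);
  a.base_index = 2;
  a.op = IPA_PARM_OP_COPY;
  inner.safe_push (a);

  /* Outer pass: its base_index values refer to the parameters surviving the
     inner adjustment, so index 1 here is original parameter 2.  */
  a.base_index = 0;
  a.op = IPA_PARM_OP_COPY;
  outer.safe_push (a);
  a.base_index = 1;
  a.op = IPA_PARM_OP_REMOVE;
  outer.safe_push (a);

  return ipa_combine_adjustments (inner, outer);
}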
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
  int i, len = adjustments.length ();

  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

	fprintf (file, " ");

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);

	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);

	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);

      if (adj->new_ssa_base)
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
	fprintf (file, ", offset %li", (long) adj->offset);
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
/* Dump the AV linked list.  */

ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
  fprintf (f, " Aggregate replacements:");
  for (; av; av = av->next)
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
/* Stream out jump function JUMP_FUNC to OB.  */

ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    case IPA_JF_UNKNOWN:
		  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->agg.by_ref, 1);
  streamer_write_bitpack (&bp);

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
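/* Illustrative sketch (not part of GCC): the bit-packing pattern used by the
   streaming routines above and below.  Single-bit flags are accumulated in a
   bitpack_d and flushed with streamer_write_bitpack; the reader must unpack
   the same number of bits in the same order.  The helper names
   example_write_flags and example_read_flags are made up for illustration.  */

static void
example_write_flags (struct output_block *ob, bool flag_a, bool flag_b)
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, flag_a, 1);
  bp_pack_value (&bp, flag_b, 1);
  streamer_write_bitpack (&bp);
}

static void
example_read_flags (struct lto_input_block *ib, bool *flag_a, bool *flag_b)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);
  *flag_a = bp_unpack_value (&bp, 1);
  *flag_b = bp_unpack_value (&bp, 1);
}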
/* Read in jump function JUMP_FUNC from IB.  */

ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
  enum jump_func_type jftype;
  enum tree_code operation;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
    case IPA_JF_ANCESTOR:
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);

    struct bitpack_d bp = streamer_read_bitpack (ib);
    jump_func->agg.by_ref = bp_unpack_value (&bp, 1);

  for (i = 0; i < count; i++)
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    jump_func->alignment.known = false;
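/* For reference, the serialized layout implied by the pair of routines above
   (a summary of the code, not an independent format specification): a uhwi
   jump-function type, a type-specific payload (tree, formal_id and/or a
   one-bit bitpack), a uhwi count of aggregate items plus a by_ref bit, the
   items themselves as (offset, tree) pairs, and finally an alignment bitpack
   optionally followed by align/misalign uhwis.  Writer and reader must stay
   in lockstep on this ordering.  */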
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);

  if (ii->polymorphic)
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
/* Stream out NODE info to OB.  */

ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
  for (e = node->indirect_calls; e; e = e->next_callee)
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
      ipa_write_indirect_edge_info (ob, e);
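/* Illustrative sketch (not part of GCC): the per-edge count written above is
   ipa_get_cs_argument_count (args) * 2 plus a flag saying whether polymorphic
   call contexts follow each jump function.  A reader can recover both pieces
   as shown below; example_decode_arg_count is a made-up name used only for
   illustration.  */

static void
example_decode_arg_count (struct lto_input_block *ib, int *nargs,
			  bool *have_contexts)
{
  int count = streamer_read_uhwi (ib);
  *have_contexts = (count & 1) != 0;
  *nargs = count / 2;
}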
/* Stream in NODE info from IB.  */

ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
  struct ipa_node_params *info = IPA_NODE_REF (node);
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;

      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
  for (e = node->indirect_calls; e; e = e->next_callee)
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;

      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
      ipa_read_indirect_edge_info (ib, data_in, e);
/* Write jump functions for nodes in SET.  */

ipa_prop_write_jump_functions (void)
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
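/* Note on the pattern above (a summary of the code, not new behavior): the
   partition is walked twice, first only to count the nodes that will be
   emitted so that the count can be streamed ahead of the per-node records;
   the reader below relies on that count to know how many records follow.  */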
/* Read section in file FILE_DATA of length LEN with data DATA.  */

ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
  lto_data_in_delete (data_in);
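/* For reference (derived from the offset arithmetic above, not a separate
   specification): the section starts with an lto_function_header, followed by
   header->cfg_size bytes of CFG data (unused here), header->main_size bytes
   forming the main stream that ib_main reads, and finally the string table of
   header->string_size bytes handed to lto_data_in_create.  */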
/* Read ipcp jump functions.  */

ipa_prop_read_jump_functions (void)
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      ipa_prop_read_section (file_data, data, len);
/* After merging units, we can get mismatches in argument counts.
   Decl merging might also have rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

ipa_update_after_lto_read (void)
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)

  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
    streamer_write_uhwi (ob, 0);
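/* For reference, the record written above (a summary of the code, not an
   independent format): the encoded node reference, a uhwi count of aggregate
   replacement values, each value as (offset, index, tree, by_ref bit), and
   then either a uhwi count of per-parameter alignments followed by the packed
   alignment entries, or a single 0 when no alignment information exists.  */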
/* Stream in the aggregate value replacement chain for NODE from IB.  */

read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	      parm_al->align = streamer_read_uhwi (ib);
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
/* Write all aggregate replacements for nodes in the set.  */

ipcp_write_transformation_summaries (void)
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

read_replacements_section (struct lto_file_decl_data *file_data,
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
  lto_data_in_delete (data_in);
/* Read IPA-CP aggregate replacements.  */

ipcp_read_transformation_summaries (void)
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;

  while ((file_data = file_data_vec[j++]))
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
      read_replacements_section (file_data, data, len);
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)

  for (v = aggval; v; v = v->next)
      gcc_assert (v->index >= 0);

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
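/* Worked example (assuming the usual remapping, which the elided lines above
   compute: skipped slots drop out and surviving indices are compacted): if
   the clone skipped original parameter 1 out of three, a replacement recorded
   for original index 2 ends up with index 1 in the clone, while replacements
   for index 0 keep their index.  */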
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT offset, size;

      if (!gimple_assign_load_p (stmt))
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))

      while (handled_component_p (t))
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	  t = TREE_OPERAND (t, 0);

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	      fprintf (dump_file, " const ");
	      print_generic_expr (dump_file, v->value, 0);
	      fprintf (dump_file, " can't be converted to type of ");
	      print_generic_expr (dump_file, rhs, 0);
	      fprintf (dump_file, "\n");

      if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Modifying stmt:\n ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
      gimple_assign_set_rhs_from_tree (&gsi, val);

      if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "into:\n ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

ipcp_update_alignments (struct cgraph_node *node)
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

	fprintf (dump_file, " Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
	  && old_align >= alignments[i].align)
	    fprintf (dump_file, " But the alignment was already %u.\n",
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
/* IPCP transformation phase doing propagation of aggregate values.  */

ipcp_transform_function (struct cgraph_node *node)
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
  adjust_agg_replacement_values (node, aggval);
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  return TODO_update_ssa_only_virtuals;