re PR middle-end/38474 (compile time explosion in dataflow_set_preserve_mem_locs...
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41 #include "data-streamer.h"
42 #include "tree-streamer.h"
43
44
45 /* Intermediate information about a parameter that is only useful during the
46 run of ipa_analyze_node and is not kept afterwards. */
47
struct param_analysis_info
{
  /* Sticky flag: set once the parameter has been found to be modified
     anywhere in the function body.  */
  bool modified;
  /* Statements already visited by walk_aliased_vdefs while computing
     MODIFIED, so repeated queries do not re-walk them.  */
  bitmap visited_statements;
};
53
/* Vector where the parameter infos are actually stored.  */
VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the argument (jump function) information for call graph
   edges is actually stored.  */
VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
58
59 /* Holders of ipa cgraph hooks: */
/* Called when a call graph edge is removed.  */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
/* Called when a call graph node is removed.  */
static struct cgraph_node_hook_list *node_removal_hook_holder;
/* Called when a call graph edge is duplicated (e.g. by inlining/cloning).  */
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
/* Called when a call graph node is duplicated.  */
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
/* Called when a new function is inserted into the call graph.  */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
65
66 /* Return index of the formal whose tree is PTREE in function which corresponds
67 to INFO. */
68
69 int
70 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
71 {
72 int i, count;
73
74 count = ipa_get_param_count (info);
75 for (i = 0; i < count; i++)
76 if (ipa_get_param (info, i) == ptree)
77 return i;
78
79 return -1;
80 }
81
82 /* Populate the param_decl field in parameter descriptors of INFO that
83 corresponds to NODE. */
84
85 static void
86 ipa_populate_param_decls (struct cgraph_node *node,
87 struct ipa_node_params *info)
88 {
89 tree fndecl;
90 tree fnargs;
91 tree parm;
92 int param_num;
93
94 fndecl = node->symbol.decl;
95 fnargs = DECL_ARGUMENTS (fndecl);
96 param_num = 0;
97 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
98 {
99 VEC_index (ipa_param_descriptor_t,
100 info->descriptors, param_num)->decl = parm;
101 param_num++;
102 }
103 }
104
105 /* Return how many formal parameters FNDECL has. */
106
107 static inline int
108 count_formal_params (tree fndecl)
109 {
110 tree parm;
111 int count = 0;
112
113 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
114 count++;
115
116 return count;
117 }
118
119 /* Initialize the ipa_node_params structure associated with NODE by counting
120 the function parameters, creating the descriptors and populating their
121 param_decls. */
122
123 void
124 ipa_initialize_node_params (struct cgraph_node *node)
125 {
126 struct ipa_node_params *info = IPA_NODE_REF (node);
127
128 if (!info->descriptors)
129 {
130 int param_count;
131
132 param_count = count_formal_params (node->symbol.decl);
133 if (param_count)
134 {
135 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
136 info->descriptors, param_count);
137 ipa_populate_param_decls (node, info);
138 }
139 }
140 }
141
142 /* Print the jump functions associated with call graph edge CS to file F. */
143
144 static void
145 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
146 {
147 int i, count;
148
149 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
150 for (i = 0; i < count; i++)
151 {
152 struct ipa_jump_func *jump_func;
153 enum jump_func_type type;
154
155 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
156 type = jump_func->type;
157
158 fprintf (f, " param %d: ", i);
159 if (type == IPA_JF_UNKNOWN)
160 fprintf (f, "UNKNOWN\n");
161 else if (type == IPA_JF_KNOWN_TYPE)
162 {
163 fprintf (f, "KNOWN TYPE: base ");
164 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
165 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
166 jump_func->value.known_type.offset);
167 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
168 fprintf (f, "\n");
169 }
170 else if (type == IPA_JF_CONST)
171 {
172 tree val = jump_func->value.constant;
173 fprintf (f, "CONST: ");
174 print_generic_expr (f, val, 0);
175 if (TREE_CODE (val) == ADDR_EXPR
176 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
177 {
178 fprintf (f, " -> ");
179 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
180 0);
181 }
182 fprintf (f, "\n");
183 }
184 else if (type == IPA_JF_CONST_MEMBER_PTR)
185 {
186 fprintf (f, "CONST MEMBER PTR: ");
187 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
188 fprintf (f, ", ");
189 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
190 fprintf (f, "\n");
191 }
192 else if (type == IPA_JF_PASS_THROUGH)
193 {
194 fprintf (f, "PASS THROUGH: ");
195 fprintf (f, "%d, op %s ",
196 jump_func->value.pass_through.formal_id,
197 tree_code_name[(int)
198 jump_func->value.pass_through.operation]);
199 if (jump_func->value.pass_through.operation != NOP_EXPR)
200 print_generic_expr (f,
201 jump_func->value.pass_through.operand, 0);
202 fprintf (f, "\n");
203 }
204 else if (type == IPA_JF_ANCESTOR)
205 {
206 fprintf (f, "ANCESTOR: ");
207 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
208 jump_func->value.ancestor.formal_id,
209 jump_func->value.ancestor.offset);
210 print_generic_expr (f, jump_func->value.ancestor.type, 0);
211 fprintf (f, "\n");
212 }
213 }
214 }
215
216
217 /* Print the jump functions of all arguments on all call graph edges going from
218 NODE to file F. */
219
220 void
221 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
222 {
223 struct cgraph_edge *cs;
224 int i;
225
226 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
227 for (cs = node->callees; cs; cs = cs->next_callee)
228 {
229 if (!ipa_edge_args_info_available_for_edge_p (cs))
230 continue;
231
232 fprintf (f, " callsite %s/%i -> %s/%i : \n",
233 xstrdup (cgraph_node_name (node)), node->uid,
234 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
235 ipa_print_node_jump_functions_for_edge (f, cs);
236 }
237
238 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
239 {
240 if (!ipa_edge_args_info_available_for_edge_p (cs))
241 continue;
242
243 if (cs->call_stmt)
244 {
245 fprintf (f, " indirect callsite %d for stmt ", i);
246 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
247 }
248 else
249 fprintf (f, " indirect callsite %d :\n", i);
250 ipa_print_node_jump_functions_for_edge (f, cs);
251
252 }
253 }
254
255 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
256
257 void
258 ipa_print_all_jump_functions (FILE *f)
259 {
260 struct cgraph_node *node;
261
262 fprintf (f, "\nJump functions:\n");
263 FOR_EACH_FUNCTION (node)
264 {
265 ipa_print_node_jump_functions (f, node);
266 }
267 }
268
269 /* Set JFUNC to be a known type jump function. */
270
271 static void
272 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
273 tree base_type, tree component_type)
274 {
275 jfunc->type = IPA_JF_KNOWN_TYPE;
276 jfunc->value.known_type.offset = offset,
277 jfunc->value.known_type.base_type = base_type;
278 jfunc->value.known_type.component_type = component_type;
279 }
280
281 /* Set JFUNC to be a constant jmp function. */
282
283 static void
284 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
285 {
286 jfunc->type = IPA_JF_CONST;
287 jfunc->value.constant = constant;
288 }
289
290 /* Set JFUNC to be a simple pass-through jump function. */
291 static void
292 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id)
293 {
294 jfunc->type = IPA_JF_PASS_THROUGH;
295 jfunc->value.pass_through.operand = NULL_TREE;
296 jfunc->value.pass_through.formal_id = formal_id;
297 jfunc->value.pass_through.operation = NOP_EXPR;
298 }
299
300 /* Set JFUNC to be an arithmetic pass through jump function. */
301
302 static void
303 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
304 tree operand, enum tree_code operation)
305 {
306 jfunc->type = IPA_JF_PASS_THROUGH;
307 jfunc->value.pass_through.operand = operand;
308 jfunc->value.pass_through.formal_id = formal_id;
309 jfunc->value.pass_through.operation = operation;
310 }
311
312 /* Set JFUNC to be an ancestor jump function. */
313
314 static void
315 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
316 tree type, int formal_id)
317 {
318 jfunc->type = IPA_JF_ANCESTOR;
319 jfunc->value.ancestor.formal_id = formal_id;
320 jfunc->value.ancestor.offset = offset;
321 jfunc->value.ancestor.type = type;
322 }
323
324 /* Simple function filling in a member pointer constant jump function (with PFN
325 and DELTA as the constant value) into JFUNC. */
326
327 static void
328 ipa_set_jf_member_ptr_cst (struct ipa_jump_func *jfunc,
329 tree pfn, tree delta)
330 {
331 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
332 jfunc->value.member_cst.pfn = pfn;
333 jfunc->value.member_cst.delta = delta;
334 }
335
336 /* Structure to be passed in between detect_type_change and
337 check_stmt_for_type_change. */
338
/* Passed as the DATA pointer through walk_aliased_vdefs to
   check_stmt_for_type_change.  */
struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
356
357 /* Return true if STMT can modify a virtual method table pointer.
358
359 This function makes special assumptions about both constructors and
360 destructors which are all the functions that are allowed to alter the VMT
361 pointers. It assumes that destructors begin with assignment into all VMT
362 pointers and that constructors essentially look in the following way:
363
364 1) The very first thing they do is that they call constructors of ancestor
365 sub-objects that have them.
366
367 2) Then VMT pointers of this and all its ancestors is set to new values
368 corresponding to the type corresponding to the constructor.
369
370 3) Only afterwards, other stuff such as constructor of member sub-objects
371 and the code written by the user is run. Only this may include calling
372 virtual functions, directly or indirectly.
373
374 There is no way to call a constructor of an ancestor sub-object in any
375 other way.
376
377 This means that we do not have to care whether constructors get the correct
378 type information because they will always change it (in fact, if we define
379 the type to be given by the VMT pointer, it is undefined).
380
381 The most important fact to derive from the above is that if, for some
382 statement in the section 3, we try to detect whether the dynamic type has
383 changed, we can safely ignore all calls as we examine the function body
384 backwards until we reach statements in section 2 because these calls cannot
385 be ancestor constructors or destructors (if the input is not bogus) and so
386 do not change the dynamic type (this holds true only for automatically
387 allocated objects but at the moment we devirtualize only these). We then
388 must detect that statements in section 2 change the dynamic type and can try
389 to derive the new type. That is enough and we can stop, we will never see
390 the calls into constructors of sub-objects in this code. Therefore we can
391 safely ignore all call statements that we traverse.
392 */
393
394 static bool
395 stmt_may_be_vtbl_ptr_store (gimple stmt)
396 {
397 if (is_gimple_call (stmt))
398 return false;
399 else if (is_gimple_assign (stmt))
400 {
401 tree lhs = gimple_assign_lhs (stmt);
402
403 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
404 {
405 if (flag_strict_aliasing
406 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
407 return false;
408
409 if (TREE_CODE (lhs) == COMPONENT_REF
410 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
411 return false;
412 /* In the future we might want to use get_base_ref_and_offset to find
413 if there is a field corresponding to the offset and if so, proceed
414 almost like if it was a component ref. */
415 }
416 }
417 return true;
418 }
419
420 /* If STMT can be proved to be an assignment to the virtual method table
421 pointer of ANALYZED_OBJ and the type associated with the new table
422 identified, return the type. Otherwise return NULL_TREE. */
423
static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  /* Only single-rhs assignments can be VMT pointer stores.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The store must write a virtual field and take the address of a
     virtual VAR_DECL (the vtable itself).  */
  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  /* The store must be exactly pointer-sized and at the offset where TCI
     expects the VMT pointer.  */
  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* For MEM_REF bases, require the same base pointer and the same
	 constant offset as the tracked object.  */
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  /* The type associated with the vtable is its DECL_CONTEXT.  */
  return DECL_CONTEXT (rhs);
}
463
464 /* Callback of walk_aliased_vdefs and a helper function for
465 detect_type_change to check whether a particular statement may modify
466 the virtual table pointer, and if possible also determine the new type of
467 the (sub-)object. It stores its result into DATA, which points to a
468 type_change_info structure. */
469
470 static bool
471 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
472 {
473 gimple stmt = SSA_NAME_DEF_STMT (vdef);
474 struct type_change_info *tci = (struct type_change_info *) data;
475
476 if (stmt_may_be_vtbl_ptr_store (stmt))
477 {
478 tree type;
479 type = extr_type_from_vtbl_ptr_store (stmt, tci);
480 if (tci->type_maybe_changed
481 && type != tci->known_current_type)
482 tci->multiple_types_encountered = true;
483 tci->known_current_type = type;
484 tci->type_maybe_changed = true;
485 return true;
486 }
487 else
488 return false;
489 }
490
491
492
493 /* Like detect_type_change but with extra argument COMP_TYPE which will become
494 the component type part of new JFUNC of dynamic type change is detected and
495 the new base type is identified. */
496
static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
		      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  /* Describe the pointer-sized memory slot at OFFSET within BASE — the
     location of the VMT pointer we are watching.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  /* Walk the virtual definitions reaching CALL backwards, looking for
     stores into the VMT pointer slot.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  /* Only a unique, unambiguous new type at offset zero lets us build a
     KNOWN_TYPE jump function; otherwise fall back to UNKNOWN.  */
  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
538
539 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
540 looking for assignments to its virtual table pointer. If it is, return true
541 and fill in the jump function JFUNC with relevant type information or set it
542 to unknown. ARG is the object itself (not a pointer to it, unless
543 dereferenced). BASE is the base of the memory access as returned by
544 get_ref_base_and_extent, as is the offset. */
545
546 static bool
547 detect_type_change (tree arg, tree base, gimple call,
548 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
549 {
550 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
551 }
552
553 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
554 SSA name (its dereference will become the base and the offset is assumed to
555 be zero). */
556
557 static bool
558 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
559 {
560 tree comp_type;
561
562 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
563 if (!flag_devirtualize
564 || !POINTER_TYPE_P (TREE_TYPE (arg))
565 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
566 return false;
567
568 comp_type = TREE_TYPE (TREE_TYPE (arg));
569 arg = build2 (MEM_REF, ptr_type_node, arg,
570 build_int_cst (ptr_type_node, 0));
571
572 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
573 }
574
575 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
576 boolean variable pointed to by DATA. */
577
578 static bool
579 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
580 void *data)
581 {
582 bool *b = (bool *) data;
583 *b = true;
584 return true;
585 }
586
587 /* Return true if the formal parameter PARM might have been modified in this
588 function before reaching the statement STMT. PARM_AINFO is a pointer to a
589 structure containing temporary information about PARM. */
590
591 static bool
592 is_parm_modified_before_stmt (struct param_analysis_info *parm_ainfo,
593 gimple stmt, tree parm)
594 {
595 bool modified = false;
596 ao_ref refd;
597
598 if (parm_ainfo->modified)
599 return true;
600
601 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
602 ao_ref_init (&refd, parm);
603 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
604 &modified, &parm_ainfo->visited_statements);
605 if (modified)
606 {
607 parm_ainfo->modified = true;
608 return true;
609 }
610 return false;
611 }
612
613 /* If STMT is an assignment that loads a value from an parameter declaration,
614 return the index of the parameter in ipa_node_params which has not been
615 modified. Otherwise return -1. */
616
617 static int
618 load_from_unmodified_param (struct ipa_node_params *info,
619 struct param_analysis_info *parms_ainfo,
620 gimple stmt)
621 {
622 int index;
623 tree op1;
624
625 if (!gimple_assign_single_p (stmt))
626 return -1;
627
628 op1 = gimple_assign_rhs1 (stmt);
629 if (TREE_CODE (op1) != PARM_DECL)
630 return -1;
631
632 index = ipa_get_param_decl_index (info, op1);
633 if (index < 0
634 || is_parm_modified_before_stmt (&parms_ainfo[index], stmt, op1))
635 return -1;
636
637 return index;
638 }
639
640 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
641 of an assignment statement STMT, try to determine whether we are actually
642 handling any of the following cases and construct an appropriate jump
643 function into JFUNC if so:
644
645 1) The passed value is loaded from a formal parameter which is not a gimple
646 register (most probably because it is addressable, the value has to be
647 scalar) and we can guarantee the value has not changed. This case can
648 therefore be described by a simple pass-through jump function. For example:
649
650 foo (int a)
651 {
652 int a.0;
653
654 a.0_2 = a;
655 bar (a.0_2);
656
657 2) The passed value can be described by a simple arithmetic pass-through
658 jump function. E.g.
659
660 foo (int a)
661 {
662 int D.2064;
663
664 D.2064_4 = a.1(D) + 4;
665 bar (D.2064_4);
666
667 This case can also occur in combination of the previous one, e.g.:
668
669 foo (int a, int z)
670 {
671 int a.0;
672 int D.2064;
673
674 a.0_3 = a;
675 D.2064_4 = a.0_3 + 4;
676 foo (D.2064_4);
677
678 3) The passed value is an address of an object within another one (which
679 also passed by reference). Such situations are described by an ancestor
680 jump function and describe situations such as:
681
682 B::foo() (struct B * const this)
683 {
684 struct A * D.1845;
685
686 D.1845_2 = &this_1(D)->D.1748;
687 A::bar (D.1845_2);
688
689 INFO is the structure describing individual parameters access different
690 stages of IPA optimizations. PARMS_AINFO contains the information that is
691 only needed for intraprocedural analysis. */
692
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* First try to relate the (first) rhs operand to an unmodified formal
     parameter; TC_SSA is the SSA name used for the later type-change
     check.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      /* Case 1 of the overview comment: STMT itself loads from an
	 unmodified parameter.  */
      index = load_from_unmodified_param (info, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Case 2: arithmetic pass-through.  The second operand must be
	     an IP invariant and, except for comparisons, the operation
	     must not change the type.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt)
	       && !detect_type_change_ssa (tc_ssa, call, jfunc))
	/* Simple pass-through, valid only if no dynamic type change was
	   detected on the way to CALL.  */
	ipa_set_jf_simple_pass_through (jfunc, index);
      return;
    }

  /* Case 3: an ancestor jump function — the rhs takes the address of a
     component within a record pointed to by a formal parameter.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  /* Fold the MEM_REF's own constant offset into the component offset.  */
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors and  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index);
}
765
766 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
767 it looks like:
768
769 iftmp.1_3 = &obj_2(D)->D.1762;
770
771 The base of the MEM_REF must be a default definition SSA NAME of a
772 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
773 whole MEM_REF expression is returned and the offset calculated from any
774 handled components and the MEM_REF itself is stored into *OFFSET. The whole
775 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
776
static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  /* The rhs must take the address of something.  */
  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  /* The base of the MEM_REF must be the default definition of a
     PARM_DECL, i.e. the incoming value of a formal parameter.  */
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Add the MEM_REF's own constant offset to the component offset.  */
  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
809
810
811 /* Given that an actual argument is an SSA_NAME that is a result of a phi
812 statement PHI, try to find out whether NAME is in fact a
813 multiple-inheritance typecast from a descendant into an ancestor of a formal
814 parameter and thus can be described by an ancestor jump function and if so,
815 write the appropriate function into JFUNC.
816
817 Essentially we want to match the following pattern:
818
819 if (obj_2(D) != 0B)
820 goto <bb 3>;
821 else
822 goto <bb 4>;
823
824 <bb 3>:
825 iftmp.1_3 = &obj_2(D)->D.1762;
826
827 <bb 4>:
828 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
829 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
830 return D.1879_6; */
831
static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  /* The pattern has exactly two incoming values: the typecast address
     and a literal zero (the null pointer case).  */
  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  /* The non-zero value must be a locally computed pointer to a record.  */
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* Its defining statement must be the &obj->field assignment, guarded
     by a single predecessor block.  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  /* The guard must be exactly "if (parm != 0)".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must come from the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  /* Valid only if no dynamic type change is detected before CALL.  */
  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index);
}
889
890 /* Given OP which is passed as an actual argument to a called function,
891 determine if it is possible to construct a KNOWN_TYPE jump function for it
892 and if so, create one and store it to JFUNC. */
893
static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* Only addresses of records are candidates, and only when
     devirtualization is enabled.  */
  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  /* The address must resolve to a constant-extent component of a local
     record declaration; global variables may be changed elsewhere.  */
  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  /* Punt if the type has no binfo or if the dynamic type may have
     changed before CALL.  */
  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}
921
922
923 /* Determine the jump functions of scalar arguments. Scalar means SSA names
924 and constants of a number of selected types. INFO is the ipa_node_params
925 structure associated with the caller, PARMS_AINFO describes state of
926 analysis with respect to individual formal parameters. ARGS is the
927 ipa_edge_args structure describing the callsite CALL which is the call
928 statement being examined.*/
929
static void
compute_scalar_jump_functions (struct ipa_node_params *info,
			       struct param_analysis_info *parms_ainfo,
			       struct ipa_edge_args *args,
			       gimple call)
{
  tree arg;
  unsigned num = 0;

  for (num = 0; num < gimple_call_num_args (call); num++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
      arg = gimple_call_arg (call, num);

      if (is_gimple_ip_invariant (arg))
	/* Interprocedural invariants become constant jump functions.  */
	ipa_set_jf_constant (jfunc, arg);
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* The unmodified incoming value of a formal parameter is a
		 simple pass-through, unless its dynamic type may have
		 changed.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));

	      if (index >= 0
		  && !detect_type_change_ssa (arg, call, jfunc))
		ipa_set_jf_simple_pass_through (jfunc, index);
	    }
	  else
	    {
	      /* Otherwise inspect the defining statement: assignments may
		 yield pass-through or ancestor functions, PHIs may match
		 the multiple-inheritance typecast pattern.  */
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, jfunc, call, stmt);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call);
    }
}
970
971 /* Inspect the given TYPE and return true iff it has the same structure (the
972 same number of fields of the same types) as a C++ member pointer. If
973 METHOD_PTR and DELTA are non-NULL, store the trees representing the
974 corresponding fields there. */
975
976 static bool
977 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
978 {
979 tree fld;
980
981 if (TREE_CODE (type) != RECORD_TYPE)
982 return false;
983
984 fld = TYPE_FIELDS (type);
985 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
986 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
987 return false;
988
989 if (method_ptr)
990 *method_ptr = fld;
991
992 fld = DECL_CHAIN (fld);
993 if (!fld || INTEGRAL_TYPE_P (fld))
994 return false;
995 if (delta)
996 *delta = fld;
997
998 if (DECL_CHAIN (fld))
999 return false;
1000
1001 return true;
1002 }
1003
1004 /* Go through arguments of the CALL and for every one that looks like a member
1005 pointer, check whether it can be safely declared pass-through and if so,
1006 mark that to the corresponding item of jump FUNCTIONS. Return true iff
1007 there are non-pass-through member pointers within the arguments. INFO
1008 describes formal parameters of the caller. PARMS_INFO is a pointer to a
1009 vector containing intermediate information about each formal parameter. */
1010
1011 static bool
1012 compute_pass_through_member_ptrs (struct ipa_node_params *info,
1013 struct param_analysis_info *parms_ainfo,
1014 struct ipa_edge_args *args,
1015 gimple call)
1016 {
1017 bool undecided_members = false;
1018 unsigned num;
1019 tree arg;
1020
1021 for (num = 0; num < gimple_call_num_args (call); num++)
1022 {
1023 arg = gimple_call_arg (call, num);
1024
1025 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
1026 {
1027 if (TREE_CODE (arg) == PARM_DECL)
1028 {
1029 int index = ipa_get_param_decl_index (info, arg);
1030
1031 gcc_assert (index >=0);
1032 if (!is_parm_modified_before_stmt (&parms_ainfo[index], call,
1033 arg))
1034 {
1035 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args,
1036 num);
1037 ipa_set_jf_simple_pass_through (jfunc, index);
1038 }
1039 else
1040 undecided_members = true;
1041 }
1042 else
1043 undecided_members = true;
1044 }
1045 }
1046
1047 return undecided_members;
1048 }
1049
1050 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1051 return the rhs of its defining statement. */
1052
1053 static inline tree
1054 get_ssa_def_if_simple_copy (tree rhs)
1055 {
1056 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1057 {
1058 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1059
1060 if (gimple_assign_single_p (def_stmt))
1061 rhs = gimple_assign_rhs1 (def_stmt);
1062 else
1063 break;
1064 }
1065 return rhs;
1066 }
1067
1068 /* Traverse statements from CALL backwards, scanning whether the argument ARG
1069 which is a member pointer is filled in with constant values. If it is, fill
1070 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
1071 fields of the record type of the member pointer. To give an example, we
1072 look for a pattern looking like the following:
1073
1074 D.2515.__pfn ={v} printStuff;
1075 D.2515.__delta ={v} 0;
1076 i_1 = doprinting (D.2515); */
1077
1078 static void
1079 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
1080 tree delta_field, struct ipa_jump_func *jfunc)
1081 {
1082 gimple_stmt_iterator gsi;
1083 tree method = NULL_TREE;
1084 tree delta = NULL_TREE;
1085
1086 gsi = gsi_for_stmt (call);
1087
1088 gsi_prev (&gsi);
1089 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1090 {
1091 gimple stmt = gsi_stmt (gsi);
1092 tree lhs, rhs, fld;
1093
1094 if (!stmt_may_clobber_ref_p (stmt, arg))
1095 continue;
1096 if (!gimple_assign_single_p (stmt))
1097 return;
1098
1099 lhs = gimple_assign_lhs (stmt);
1100 rhs = gimple_assign_rhs1 (stmt);
1101
1102 if (TREE_CODE (lhs) != COMPONENT_REF
1103 || TREE_OPERAND (lhs, 0) != arg)
1104 return;
1105
1106 fld = TREE_OPERAND (lhs, 1);
1107 if (!method && fld == method_field)
1108 {
1109 rhs = get_ssa_def_if_simple_copy (rhs);
1110 if (TREE_CODE (rhs) == ADDR_EXPR
1111 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
1112 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
1113 {
1114 method = TREE_OPERAND (rhs, 0);
1115 if (delta)
1116 {
1117 ipa_set_jf_member_ptr_cst (jfunc, rhs, delta);
1118 return;
1119 }
1120 }
1121 else
1122 return;
1123 }
1124
1125 if (!delta && fld == delta_field)
1126 {
1127 rhs = get_ssa_def_if_simple_copy (rhs);
1128 if (TREE_CODE (rhs) == INTEGER_CST)
1129 {
1130 delta = rhs;
1131 if (method)
1132 {
1133 ipa_set_jf_member_ptr_cst (jfunc, rhs, delta);
1134 return;
1135 }
1136 }
1137 else
1138 return;
1139 }
1140 }
1141
1142 return;
1143 }
1144
1145 /* Go through the arguments of the CALL and for every member pointer within
1146 tries determine whether it is a constant. If it is, create a corresponding
1147 constant jump function in FUNCTIONS which is an array of jump functions
1148 associated with the call. */
1149
1150 static void
1151 compute_cst_member_ptr_arguments (struct ipa_edge_args *args,
1152 gimple call)
1153 {
1154 unsigned num;
1155 tree arg, method_field, delta_field;
1156
1157 for (num = 0; num < gimple_call_num_args (call); num++)
1158 {
1159 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
1160 arg = gimple_call_arg (call, num);
1161
1162 if (jfunc->type == IPA_JF_UNKNOWN
1163 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
1164 &delta_field))
1165 determine_cst_member_ptr (call, arg, method_field, delta_field, jfunc);
1166 }
1167 }
1168
1169 /* Compute jump function for all arguments of callsite CS and insert the
1170 information in the jump_functions array in the ipa_edge_args corresponding
1171 to this callsite. */
1172
1173 static void
1174 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1175 struct cgraph_edge *cs)
1176 {
1177 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1178 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1179 gimple call = cs->call_stmt;
1180 int arg_num = gimple_call_num_args (call);
1181
1182 if (arg_num == 0 || args->jump_functions)
1183 return;
1184 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);
1185
1186 /* We will deal with constants and SSA scalars first: */
1187 compute_scalar_jump_functions (info, parms_ainfo, args, call);
1188
1189 /* Let's check whether there are any potential member pointers and if so,
1190 whether we can determine their functions as pass_through. */
1191 if (!compute_pass_through_member_ptrs (info, parms_ainfo, args, call))
1192 return;
1193
1194 /* Finally, let's check whether we actually pass a new constant member
1195 pointer here... */
1196 compute_cst_member_ptr_arguments (args, call);
1197 }
1198
1199 /* Compute jump functions for all edges - both direct and indirect - outgoing
1200 from NODE. Also count the actual arguments in the process. */
1201
1202 static void
1203 ipa_compute_jump_functions (struct cgraph_node *node,
1204 struct param_analysis_info *parms_ainfo)
1205 {
1206 struct cgraph_edge *cs;
1207
1208 for (cs = node->callees; cs; cs = cs->next_callee)
1209 {
1210 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1211 NULL);
1212 /* We do not need to bother analyzing calls to unknown
1213 functions unless they may become known during lto/whopr. */
1214 if (!callee->analyzed && !flag_lto)
1215 continue;
1216 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1217 }
1218
1219 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1220 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1221 }
1222
/* If RHS looks like a rhs of a statement loading pfn from a member
   pointer formal parameter, return the parameter, otherwise return
   NULL.  If USE_DELTA, then we look for a use of the delta field
   rather than the pfn.  */

static tree
ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
{
  tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;

  /* The load is either a COMPONENT_REF on top of a MEM_REF of the parameter
     or a bare MEM_REF with a constant byte offset.  Peel off a COMPONENT_REF
     first, remembering which field it accessed.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  /* The base must be the address of a PARM_DECL whose type is laid out like
     a C++ member pointer.  */
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;

  /* The second MEM_REF operand is the constant byte offset.  */
  ref_offset = TREE_OPERAND (rhs, 1);

  if (ref_field)
    {
      /* When there is a COMPONENT_REF, it must sit directly on the parameter
	 (zero MEM_REF offset) and name the requested field.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;

      if (use_delta)
	fld = delta_field;
      else
	fld = ptr_field;

      return ref_field == fld ? rec : NULL_TREE;
    }

  /* Otherwise match the MEM_REF offset against the byte position of the
     requested field within the record.  */
  if (use_delta)
    fld_offset = byte_position (delta_field);
  else
    fld_offset = byte_position (ptr_field);

  return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
}
1272
1273 /* If STMT looks like a statement loading a value from a member pointer formal
1274 parameter, this function returns that parameter. */
1275
1276 static tree
1277 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1278 {
1279 tree rhs;
1280
1281 if (!gimple_assign_single_p (stmt))
1282 return NULL_TREE;
1283
1284 rhs = gimple_assign_rhs1 (stmt);
1285 return ipa_get_member_ptr_load_param (rhs, use_delta);
1286 }
1287
1288 /* Returns true iff T is an SSA_NAME defined by a statement. */
1289
1290 static bool
1291 ipa_is_ssa_with_stmt_def (tree t)
1292 {
1293 if (TREE_CODE (t) == SSA_NAME
1294 && !SSA_NAME_IS_DEFAULT_DEF (t))
1295 return true;
1296 else
1297 return false;
1298 }
1299
1300 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1301 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1302 indirect call graph edge. */
1303
1304 static struct cgraph_edge *
1305 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1306 {
1307 struct cgraph_edge *cs;
1308
1309 cs = cgraph_edge (node, stmt);
1310 cs->indirect_info->param_index = param_index;
1311 cs->indirect_info->anc_offset = 0;
1312 cs->indirect_info->polymorphic = 0;
1313 return cs;
1314 }
1315
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }
*/

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_ainfo,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      /* The simple case: the call target is the default definition of a
	 parameter, i.e. a pointer parameter unmodified since function
	 entry.  */
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */

  if (!POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* The call target must come from a two-argument PHI merging the direct
     pfn with the result of the virtual lookup (see the dump above).  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_code (def) != GIMPLE_PHI)
    return;

  if (gimple_phi_num_args (def) != 2)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
    {
      /* Exactly one PHI argument may be the pfn load; the other is the
	 result of the virtual-call lookup.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  /* The condition operand must be the result of masking with 1.  */
  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Look through an optional conversion of the tested value.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* Depending on the target ABI, the virtual bit is tested either in the
     pfn or in the delta field, hence the choice of field here.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta));

  /* Both loads must come from the same member pointer parameter.  */
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0 && !is_parm_modified_before_stmt (&parms_ainfo[index],
						   call, rec))
    ipa_note_param_call (node, index, call);

  return;
}
1495
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
			       struct ipa_node_params *info, gimple call,
			       tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* OBJ is an unmodified parameter itself.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Bail out if a change of the object's dynamic type cannot be ruled
	 out between function entry and the call.  */
      if (detect_type_change_ssa (obj, call, &jfunc))
	return;
    }
  else
    {
      /* Otherwise OBJ may be the address of an ancestor part of a
	 parameter; GET_ANCESTOR_ADDR_INFO extracts the base and offset.  */
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
	return;
    }

  /* Record the details of the polymorphic call on the indirect edge.  */
  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->anc_offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
1551
1552 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1553 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1554 containing intermediate information about each formal parameter. */
1555
1556 static void
1557 ipa_analyze_call_uses (struct cgraph_node *node,
1558 struct ipa_node_params *info,
1559 struct param_analysis_info *parms_ainfo, gimple call)
1560 {
1561 tree target = gimple_call_fn (call);
1562
1563 if (!target)
1564 return;
1565 if (TREE_CODE (target) == SSA_NAME)
1566 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1567 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1568 ipa_analyze_virtual_call_uses (node, info, call, target);
1569 }
1570
1571
1572 /* Analyze the call statement STMT with respect to formal parameters (described
1573 in INFO) of caller given by NODE. Currently it only checks whether formal
1574 parameters are called. PARMS_AINFO is a pointer to a vector containing
1575 intermediate information about each formal parameter. */
1576
1577 static void
1578 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1579 struct param_analysis_info *parms_ainfo, gimple stmt)
1580 {
1581 if (is_gimple_call (stmt))
1582 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1583 }
1584
1585 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1586 If OP is a parameter declaration, mark it as used in the info structure
1587 passed in DATA. */
1588
1589 static bool
1590 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1591 tree op, void *data)
1592 {
1593 struct ipa_node_params *info = (struct ipa_node_params *) data;
1594
1595 op = get_base_address (op);
1596 if (op
1597 && TREE_CODE (op) == PARM_DECL)
1598 {
1599 int index = ipa_get_param_decl_index (info, op);
1600 gcc_assert (index >= 0);
1601 ipa_set_param_used (info, index, true);
1602 }
1603
1604 return false;
1605 }
1606
/* Scan the function body of NODE and inspect the uses of formal parameters.
   Store the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  PARMS_AINFO is a pointer to
   a vector containing intermediate information about each formal
   parameter.  */

static void
ipa_analyze_params_uses (struct cgraph_node *node,
			 struct param_analysis_info *parms_ainfo)
{
  tree decl = node->symbol.decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  /* Nothing to do for parameter-less functions or if the analysis has
     already been performed.  */
  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm)
	  && gimple_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl), parm))
	ipa_set_param_used (info, i, true);
    }

  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* Debug statements must not influence the analysis result.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
	  /* Record loads, stores and address-taking operations that involve
	     parameters.  */
	  walk_stmt_load_store_addr_ops (stmt, info,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis);
	}
      /* PHI node arguments can also refer to parameters.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis);
    }

  info->uses_analysis_done = 1;
}
1661
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_ainfo;
  int i, param_count;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  /* Make NODE's function body the current one for the analyses below.  */
  push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
  current_function_decl = node->symbol.decl;
  ipa_initialize_node_params (node);

  /* PARMS_AINFO holds per-parameter scratch data (the modified flag and the
     visited-statements bitmap) used only during this analysis; it lives on
     the stack via XALLOCAVEC.  */
  param_count = ipa_get_param_count (info);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_ainfo);
  ipa_compute_jump_functions (node, parms_ainfo);

  /* Release the lazily allocated bitmaps.  */
  for (i = 0; i < param_count; i++)
    if (parms_ainfo[i].visited_statements)
      BITMAP_FREE (parms_ainfo[i].visited_statements);

  current_function_decl = NULL;
  pop_cfun ();
}
1694
1695
1696 /* Update the jump function DST when the call graph edge corresponding to SRC is
1697 is being inlined, knowing that DST is of type ancestor and src of known
1698 type. */
1699
1700 static void
1701 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1702 struct ipa_jump_func *dst)
1703 {
1704 HOST_WIDE_INT combined_offset;
1705 tree combined_type;
1706
1707 combined_offset = ipa_get_jf_known_type_offset (src)
1708 + ipa_get_jf_ancestor_offset (dst);
1709 combined_type = ipa_get_jf_ancestor_type (dst);
1710
1711 ipa_set_jf_known_type (dst, combined_offset,
1712 ipa_get_jf_known_type_base_type (src),
1713 combined_type);
1714 }
1715
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  /* Compose the ancestor jump function with the jump function
	     describing the corresponding argument of the inlined call.  */
	  src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    /* A simple pass-through just redirects the formal index.  */
	    dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* Two ancestor functions compose by adding their offsets.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      /* A simple pass-through becomes a copy of whatever jump
		 function described the argument of the inlined call.  */
	      src = ipa_get_ith_jump_func (top,
					   dst->value.pass_through.formal_id);
	      *dst = *src;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
1778
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;

  /* Accept both a bare FUNCTION_DECL and its address.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    return NULL;
  callee = cgraph_get_node (target);
  if (!callee)
    return NULL;
  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  cgraph_make_edge_direct (ie, callee);
  if (dump_file)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
	       xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  /* NOTE(review): the result of looking through thunks/aliases is assigned
     to CALLEE but never used afterwards — looks like dead code or a
     leftover; confirm intent.  */
  callee = cgraph_function_or_thunk_node (callee, NULL);

  return ie;
}
1817
1818 /* Try to find a destination for indirect edge IE that corresponds to a simple
1819 call or a call of a member function pointer and where the destination is a
1820 pointer formal parameter described by jump function JFUNC. If it can be
1821 determined, return the newly direct edge, otherwise return NULL. */
1822
1823 static struct cgraph_edge *
1824 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1825 struct ipa_jump_func *jfunc)
1826 {
1827 tree target;
1828
1829 if (jfunc->type == IPA_JF_CONST)
1830 target = ipa_get_jf_constant (jfunc);
1831 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1832 target = ipa_get_jf_member_ptr_pfn (jfunc);
1833 else
1834 return NULL;
1835
1836 return ipa_make_edge_direct_to_target (ie, target);
1837 }
1838
1839 /* Try to find a destination for indirect edge IE that corresponds to a
1840 virtual call based on a formal parameter which is described by jump
1841 function JFUNC and if it can be determined, make it direct and return the
1842 direct edge. Otherwise, return NULL. */
1843
1844 static struct cgraph_edge *
1845 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1846 struct ipa_jump_func *jfunc)
1847 {
1848 tree binfo, target;
1849
1850 if (jfunc->type != IPA_JF_KNOWN_TYPE)
1851 return NULL;
1852
1853 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
1854 gcc_checking_assert (binfo);
1855 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
1856 + ie->indirect_info->anc_offset,
1857 ie->indirect_info->otr_type);
1858 if (binfo)
1859 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
1860 binfo);
1861 else
1862 return NULL;
1863
1864 if (target)
1865 return ipa_make_edge_direct_to_target (ie, target);
1866 else
1867 return NULL;
1868 }
1869
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      VEC (cgraph_edge_p, heap) **new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;

      /* Remember the successor now; IE may be made direct below, removing
	 it from the indirect-call list.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      /* Translate the formal parameter index through the jump function of
	 the corresponding argument of the inlined call.  */
      jfunc = ipa_get_ith_jump_func (top, ici->param_index);
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	  ici->anc_offset += ipa_get_jf_ancestor_offset (jfunc);
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;

      if (!flag_indirect_inlining)
	continue;

      if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);

      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
						   new_direct_edge->callee->symbol.decl);
	  if (new_edges)
	    {
	      VEC_safe_push (cgraph_edge_p, heap, *new_edges,
			     new_direct_edge);
	      /* Re-fetch TOP, the underlying edge-args storage may have
		 been reallocated in the meantime.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
    }

  return res;
}
1945
1946 /* Recursively traverse subtree of NODE (including node) made of inlined
1947 cgraph_edges when CS has been inlined and invoke
1948 update_indirect_edges_after_inlining on all nodes and
1949 update_jump_functions_after_inlining on all non-inlined edges that lead out
1950 of this subtree. Newly discovered indirect edges will be added to
1951 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1952 created. */
1953
1954 static bool
1955 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1956 struct cgraph_node *node,
1957 VEC (cgraph_edge_p, heap) **new_edges)
1958 {
1959 struct cgraph_edge *e;
1960 bool res;
1961
1962 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1963
1964 for (e = node->callees; e; e = e->next_callee)
1965 if (!e->inline_failed)
1966 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1967 else
1968 update_jump_functions_after_inlining (cs, e);
1969 for (e = node->indirect_calls; e; e = e->next_callee)
1970 update_jump_functions_after_inlining (cs, e);
1971
1972 return res;
1973 }
1974
1975 /* Update jump functions and call note functions on inlining the call site CS.
1976 CS is expected to lead to a node already cloned by
1977 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1978 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
1979 created. */
1980
1981 bool
1982 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1983 VEC (cgraph_edge_p, heap) **new_edges)
1984 {
1985 bool changed;
1986 /* Do nothing if the preparation phase has not been carried out yet
1987 (i.e. during early inlining). */
1988 if (!ipa_node_params_vector)
1989 return false;
1990 gcc_assert (ipa_edge_args_vector);
1991
1992 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1993
1994 /* We do not keep jump functions of inlined edges up to date. Better to free
1995 them so we do not access them accidentally. */
1996 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1997 return changed;
1998 }
1999
2000 /* Frees all dynamically allocated structures that the argument info points
2001 to. */
2002
2003 void
2004 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2005 {
2006 if (args->jump_functions)
2007 ggc_free (args->jump_functions);
2008
2009 memset (args, 0, sizeof (*args));
2010 }
2011
2012 /* Free all ipa_edge structures. */
2013
2014 void
2015 ipa_free_all_edge_args (void)
2016 {
2017 int i;
2018 struct ipa_edge_args *args;
2019
2020 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2021 ipa_free_edge_args_substructures (args);
2022
2023 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2024 ipa_edge_args_vector = NULL;
2025 }
2026
/* Frees all dynamically allocated structures that the param info points
   to.  The structure itself is zeroed afterwards so it can be reused.  */

void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  VEC_free (tree, heap, info->known_vals);
  memset (info, 0, sizeof (*info));
}
2040
2041 /* Free all ipa_node_params structures. */
2042
2043 void
2044 ipa_free_all_node_params (void)
2045 {
2046 int i;
2047 struct ipa_node_params *info;
2048
2049 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2050 ipa_free_node_params_substructures (info);
2051
2052 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2053 ipa_node_params_vector = NULL;
2054 }
2055
/* Hook that is called by cgraph.c when an edge is removed.  Frees the
   edge's argument info, if any has been allocated for it.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones:
     their uid is beyond the end of the vector, so there is nothing to
     free.  */
  if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
      <= (unsigned)cs->uid)
    return;
  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
2067
/* Hook that is called by cgraph.c when a node is removed.  Frees the
   node's parameter info, if any has been allocated for it.  */

static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones:
     their uid is beyond the end of the vector, so there is nothing to
     free.  */
  if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
      <= (unsigned)node->uid)
    return;
  ipa_free_node_params_substructures (IPA_NODE_REF (node));
}
2079
2080 /* Hook that is called by cgraph.c when a node is duplicated. */
2081
2082 static void
2083 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2084 __attribute__((unused)) void *data)
2085 {
2086 struct ipa_edge_args *old_args, *new_args;
2087
2088 ipa_check_create_edge_args ();
2089
2090 old_args = IPA_EDGE_REF (src);
2091 new_args = IPA_EDGE_REF (dst);
2092
2093 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2094 old_args->jump_functions);
2095 }
2096
/* Hook that is called by cgraph.c when a node is duplicated.  Copies the
   parameter descriptors and analysis flags from SRC's info to DST's;
   lattices are not copied (they are only meaningful during propagation)
   and are reset to NULL.  DATA is unused.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
				    old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->uses_analysis_done = old_info->uses_analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
}
2117
/* Analyze newly added function into callgraph.  Called through the
   function insertion hook; NODE is the new function, DATA is unused.  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  ipa_analyze_node (node);
}
2126
/* Register our cgraph hooks if they are not already there.  Note that the
   function insertion hook is registered unconditionally, so callers must
   pair this with ipa_unregister_cgraph_hooks exactly once.  */

void
ipa_register_cgraph_hooks (void)
{
  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!node_removal_hook_holder)
    node_removal_hook_holder =
      cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  if (!node_duplication_hook_holder)
    node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
}
2147
/* Unregister our cgraph hooks and clear the holders so a later
   ipa_register_cgraph_hooks can re-register them.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
2164
2165 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2166 longer needed after ipa-cp. */
2167
2168 void
2169 ipa_free_all_structures_after_ipa_cp (void)
2170 {
2171 if (!optimize)
2172 {
2173 ipa_free_all_edge_args ();
2174 ipa_free_all_node_params ();
2175 free_alloc_pool (ipcp_sources_pool);
2176 free_alloc_pool (ipcp_values_pool);
2177 ipa_unregister_cgraph_hooks ();
2178 }
2179 }
2180
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Unlike the post-ipa-cp variant,
   this frees unconditionally; the value pools are guarded because they are
   only created when IPA-CP actually ran.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
}
2195
/* Print the parameter descriptors of function NODE to file F.  Does
   nothing for nodes that have not been analyzed.  */

void
ipa_print_node_params (FILE * f, struct cgraph_node *node)
{
  int i, count;
  tree temp;
  struct ipa_node_params *info;

  if (!node->analyzed)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n",
	   cgraph_node_name (node));
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      temp = ipa_get_param (info, i);
      /* Only PARM_DECLs have a printable name; other trees are silently
	 skipped here (their "used" flag is still printed).  */
      if (TREE_CODE (temp) == PARM_DECL)
	fprintf (f, "    param %d : %s", i,
		 (DECL_NAME (temp)
		  ? (*lang_hooks.decl_printable_name) (temp, 2)
		  : "(unnamed)"));
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      fprintf (f, "\n");
    }
}
2225
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
2238
/* Return a heap allocated vector containing formal parameters of FNDECL.
   The caller is responsible for freeing the vector.  */

VEC(tree, heap) *
ipa_get_vector_of_formal_parms (tree fndecl)
{
  VEC(tree, heap) *args;
  int count;
  tree parm;

  /* Pre-size the vector so quick_push below cannot overflow.  */
  count = count_formal_params (fndecl);
  args = VEC_alloc (tree, heap, count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    VEC_quick_push (tree, args, parm);

  return args;
}
2255
2256 /* Return a heap allocated vector containing types of formal parameters of
2257 function type FNTYPE. */
2258
2259 static inline VEC(tree, heap) *
2260 get_vector_of_formal_parm_types (tree fntype)
2261 {
2262 VEC(tree, heap) *types;
2263 int count = 0;
2264 tree t;
2265
2266 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2267 count++;
2268
2269 types = VEC_alloc (tree, heap, count);
2270 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2271 VEC_quick_push (tree, types, TREE_VALUE (t));
2272
2273 return types;
2274 }
2275
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  Synthetic parameters are named with SYNTH_PARM_PREFIX
   (defaulting to "SYNTH" when NULL is passed).  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  VEC(tree, heap) *oparms, *otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      /* A trailing void_type_node marks a non-varargs argument list; in
	 that case OTYPES has one more element than OPARMS.  */
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
      else
	gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
    }
  else
    {
      last_parm_void = false;
      otypes = NULL;
    }

  /* Rebuild DECL_ARGUMENTS (via LINK) and the new argument type list
     according to each adjustment: copied params are re-chained as-is,
     removed ones are skipped, and replacements get a fresh PARM_DECL.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      parm = VEC_index (tree, oparms, adj->base_index);
      adj->base = parm;

      if (adj->copy_param)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
							     adj->base_index),
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  tree new_parm;
	  tree ptype;

	  /* A by-reference replacement is passed as a pointer to the
	     new type.  */
	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  add_referenced_var (new_parm);
	  mark_sym_for_renaming (new_parm);
	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      /* The type list was built in reverse; restore order and re-attach
	 the terminating void_list_node if the original had one.  */
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
       || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
	 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  if (otypes)
    VEC_free (tree, heap, otypes);
  VEC_free (tree, heap, oparms);
}
2435
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  The original call STMT is
   replaced with a newly built call; SSA form and dominators are updated at
   the end.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  VEC(tree, heap) *vargs;
  VEC(tree, gc) **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  tree callee_decl;
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  vargs = VEC_alloc (tree, heap, len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;

  gsi = gsi_for_stmt (stmt);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->copy_param)
	{
	  /* Argument is kept as-is; just forward it.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  VEC_quick_push (tree, vargs, arg);
	}
      else if (!adj->remove_param)
	{
	  tree expr, base, off;
	  location_t loc;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
               value.  (adj->by_ref is false and the type of the original
               actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
               The part can be passed either by value or by reference, this is
               determined by value of adj->by_ref.  Moreover, the code below
               handles both situations when the original aggregate is passed by
               value (its type is not a pointer) and when it is passed by
               reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* The original is already a pointer; the offset becomes the
	       MEM_REF offset operand directly.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;

	      if (TREE_CODE (base) == ADDR_EXPR)
		base = TREE_OPERAND (base, 0);
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  /* Fold the existing MEM_REF offset into the new one.  */
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      /* Compute the alignment actually guaranteed at BASE + OFF
		 and weaken the access type if it is lower than the
		 type's natural alignment.  */
	      get_pointer_alignment_1 (base, &align, &misalign);
	      misalign += (double_int_sext (tree_to_double_int (off),
					    TYPE_PRECISION (TREE_TYPE (off))).low
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	    }

	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  VEC_quick_push (tree, vargs, expr);
	}
      /* For arguments that are removed or transformed, emit a debug bind
	 so debug info can still refer to the original parameter value.  */
      if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* DEBUG_ARGS stores (origin, debug-decl) pairs; reuse an
	     existing entry for this origin if there is one.  */
	  for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = VEC_index (tree, *debug_args, ix + 1);
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      VEC_safe_push (tree, gc, *debug_args, origin);
	      VEC_safe_push (tree, gc, *debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
					      stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  /* Build the replacement call, carrying over lhs, location, block,
     static chain and flags from the original statement.  */
  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
2630
2631 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2632
2633 static bool
2634 index_in_adjustments_multiple_times_p (int base_index,
2635 ipa_parm_adjustment_vec adjustments)
2636 {
2637 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2638 bool one = false;
2639
2640 for (i = 0; i < len; i++)
2641 {
2642 struct ipa_parm_adjustment *adj;
2643 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2644
2645 if (adj->base_index == base_index)
2646 {
2647 if (one)
2648 return true;
2649 else
2650 one = true;
2651 }
2652 }
2653 return false;
2654 }
2655
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  The result is heap allocated; the
   caller owns it.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
  int inlen = VEC_length (ipa_parm_adjustment_t, inner);
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP holds the surviving inner adjustments indexed so that OUTER's
     base_index can be used to look them up directly.  */
  tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = VEC_index (ipa_parm_adjustment_t, inner, i);

      if (n->remove_param)
	removals++;
      else
	VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
    }

  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment *r;
      struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
						   outer, i);
      struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
						  out->base_index);

      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* Only emit one removal per original parameter; parameters that
	     appear multiple times are handled by their other entries.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (r, 0, sizeof (*r));
	      r->remove_param = true;
	    }
	  continue;
	}

      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
      memset (r, 0, sizeof (*r));
      r->base_index = in->base_index;
      r->type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      /* Offsets compose additively; a plain copy on either side simply
	 forwards the other side's offset.  */
      if (in->copy_param && out->copy_param)
	r->copy_param = true;
      else if (in->copy_param)
	r->offset = out->offset;
      else if (out->copy_param)
	r->offset = in->offset;
      else
	r->offset = in->offset + out->offset;
    }

  /* Re-append the inner removals, which OUTER could not reference.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
						 inner, i);

      if (n->remove_param)
	VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
    }

  VEC_free (ipa_parm_adjustment_t, heap, tmp);
  return adjustments;
}
2732
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  bool first = true;
  VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->reduction)
	{
	  fprintf (file, ", reduction: ");
	  print_generic_expr (file, adj->reduction, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      /* copy/remove/offset are mutually exclusive kinds of adjustment.  */
      if (adj->copy_param)
	fprintf (file, ", copy_param");
      else if (adj->remove_param)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  VEC_free (tree, heap, parms);
}
2786
/* Stream out jump function JUMP_FUNC to OB.  The fields written per type
   must stay in sync with ipa_read_jump_function below.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  streamer_write_uhwi (ob, jump_func->type);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      stream_write_tree (ob, jump_func->value.constant, true);
      break;
    case IPA_JF_PASS_THROUGH:
      stream_write_tree (ob, jump_func->value.pass_through.operand, true);
      streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      stream_write_tree (ob, jump_func->value.member_cst.pfn, true);
      stream_write_tree (ob, jump_func->value.member_cst.delta, false);
      break;
    }
}
2823
/* Read in jump function JUMP_FUNC from IB.  The fields read per type must
   stay in sync with ipa_write_jump_function above.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct data_in *data_in)
{
  jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      jump_func->value.known_type.offset = streamer_read_uhwi (ib);
      jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
      jump_func->value.known_type.component_type = stream_read_tree (ib,
								     data_in);
      break;
    case IPA_JF_CONST:
      jump_func->value.constant = stream_read_tree (ib, data_in);
      break;
    case IPA_JF_PASS_THROUGH:
      jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
      jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
      jump_func->value.pass_through.operation
	= (enum tree_code) streamer_read_uhwi (ib);
      break;
    case IPA_JF_ANCESTOR:
      jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
      jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
      jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      jump_func->value.member_cst.pfn = stream_read_tree (ib, data_in);
      jump_func->value.member_cst.delta = stream_read_tree (ib, data_in);
      break;
    }
}
2863
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Must stay in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->anc_offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  streamer_write_bitpack (&bp);

  /* Virtual-call details are only present for polymorphic calls.  */
  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
    }
}
2886
2887 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2888 relevant to indirect inlining from IB. */
2889
2890 static void
2891 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2892 struct data_in *data_in ATTRIBUTE_UNUSED,
2893 struct cgraph_edge *cs)
2894 {
2895 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2896 struct bitpack_d bp;
2897
2898 ii->param_index = (int) streamer_read_hwi (ib);
2899 ii->anc_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
2900 bp = streamer_read_bitpack (ib);
2901 ii->polymorphic = bp_unpack_value (&bp, 1);
2902 if (ii->polymorphic)
2903 {
2904 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
2905 ii->otr_type = stream_read_tree (ib, data_in);
2906 }
2907 }
2908
/* Stream out NODE info to OB: a node reference, the per-parameter "used"
   bits, and the jump functions of all direct and indirect call edges
   (indirect edges additionally carry their indirect-call info).  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_cgraph_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->cgraph_node_encoder;
  node_ref = lto_cgraph_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  bp = bitpack_create (ob->main_stream);
  /* Only fully analyzed (or parameterless) nodes may be streamed.  */
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}
2951
/* Stream in NODE info from IB.  Mirrors ipa_write_node_info: reads the
   per-parameter "used" bits and the jump functions of all direct and
   indirect call edges.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_initialize_node_params (node);

  bp = streamer_read_bitpack (ib);
  /* The writer asserted uses_analysis_done (or zero params), so restore
     the flag accordingly.  */
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
	continue;
      VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
				 count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
				    data_in);
	}
      /* Indirect-call info is present even when there are no arguments.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
2999
/* Write jump functions for nodes in SET.  Produces the
   LTO_section_jump_functions section: a node count followed by the
   per-node info.  Does nothing if IPA analysis has not been run.  */

void
ipa_prop_write_jump_functions (cgraph_node_set set)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  if (!ipa_node_params_vector)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  ob->cgraph_node = NULL;
  /* First pass: count the nodes that will actually be written, so the
     reader knows how many records follow.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
3037
/* Read section in file FILE_DATA of length LEN with data DATA.  Parses one
   LTO_section_jump_functions section as produced by
   ipa_prop_write_jump_functions and fills in the info of the referenced
   nodes.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_cgraph_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->cgraph_node_encoder;
      node = lto_cgraph_encoder_deref (encoder, index);
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
3078
3079 /* Read ipcp jump functions. */
3080
3081 void
3082 ipa_prop_read_jump_functions (void)
3083 {
3084 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3085 struct lto_file_decl_data *file_data;
3086 unsigned int j = 0;
3087
3088 ipa_check_create_node_params ();
3089 ipa_check_create_edge_args ();
3090 ipa_register_cgraph_hooks ();
3091
3092 while ((file_data = file_data_vec[j++]))
3093 {
3094 size_t len;
3095 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3096
3097 if (data)
3098 ipa_prop_read_section (file_data, data, len);
3099 }
3100 }
3101
3102 /* After merging units, we can get mismatch in argument counts.
3103 Also decl merging might've rendered parameter lists obsolete.
3104 Also compute called_with_variable_arg info. */
3105
3106 void
3107 ipa_update_after_lto_read (void)
3108 {
3109 struct cgraph_node *node;
3110
3111 ipa_check_create_node_params ();
3112 ipa_check_create_edge_args ();
3113
3114 FOR_EACH_DEFINED_FUNCTION (node)
3115 if (node->analyzed)
3116 ipa_initialize_node_params (node);
3117 }