/* Source: gcc.git / gcc / ipa-prop.c  */
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40
41
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  /* Set once the parameter is known to be modified before some examined
     statement; once true it is never reset (see
     is_parm_modified_before_stmt).  */
  bool modified;
  /* Statements already visited by walk_aliased_vdefs for this parameter,
     so that repeated queries do not walk them again.  */
  bitmap visited_statements;
};
50
51 /* Vector where the parameter infos are actually stored. */
52 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the information about call site arguments (jump functions)
   is actually stored.  */
54 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
55
56 /* Holders of ipa cgraph hooks: */
57 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
58 static struct cgraph_node_hook_list *node_removal_hook_holder;
59 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
60 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
61 static struct cgraph_node_hook_list *function_insertion_hook_holder;
62
63 /* Return index of the formal whose tree is PTREE in function which corresponds
64 to INFO. */
65
66 int
67 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
68 {
69 int i, count;
70
71 count = ipa_get_param_count (info);
72 for (i = 0; i < count; i++)
73 if (ipa_get_param (info, i) == ptree)
74 return i;
75
76 return -1;
77 }
78
79 /* Populate the param_decl field in parameter descriptors of INFO that
80 corresponds to NODE. */
81
82 static void
83 ipa_populate_param_decls (struct cgraph_node *node,
84 struct ipa_node_params *info)
85 {
86 tree fndecl;
87 tree fnargs;
88 tree parm;
89 int param_num;
90
91 fndecl = node->symbol.decl;
92 fnargs = DECL_ARGUMENTS (fndecl);
93 param_num = 0;
94 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
95 {
96 VEC_index (ipa_param_descriptor_t,
97 info->descriptors, param_num)->decl = parm;
98 param_num++;
99 }
100 }
101
102 /* Return how many formal parameters FNDECL has. */
103
104 static inline int
105 count_formal_params (tree fndecl)
106 {
107 tree parm;
108 int count = 0;
109
110 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
111 count++;
112
113 return count;
114 }
115
116 /* Initialize the ipa_node_params structure associated with NODE by counting
117 the function parameters, creating the descriptors and populating their
118 param_decls. */
119
120 void
121 ipa_initialize_node_params (struct cgraph_node *node)
122 {
123 struct ipa_node_params *info = IPA_NODE_REF (node);
124
125 if (!info->descriptors)
126 {
127 int param_count;
128
129 param_count = count_formal_params (node->symbol.decl);
130 if (param_count)
131 {
132 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
133 info->descriptors, param_count);
134 ipa_populate_param_decls (node, info);
135 }
136 }
137 }
138
139 /* Print the jump functions associated with call graph edge CS to file F. */
140
141 static void
142 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
143 {
144 int i, count;
145
146 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
147 for (i = 0; i < count; i++)
148 {
149 struct ipa_jump_func *jump_func;
150 enum jump_func_type type;
151
152 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
153 type = jump_func->type;
154
155 fprintf (f, " param %d: ", i);
156 if (type == IPA_JF_UNKNOWN)
157 fprintf (f, "UNKNOWN\n");
158 else if (type == IPA_JF_KNOWN_TYPE)
159 {
160 fprintf (f, "KNOWN TYPE: base ");
161 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
162 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
163 jump_func->value.known_type.offset);
164 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
165 fprintf (f, "\n");
166 }
167 else if (type == IPA_JF_CONST)
168 {
169 tree val = jump_func->value.constant;
170 fprintf (f, "CONST: ");
171 print_generic_expr (f, val, 0);
172 if (TREE_CODE (val) == ADDR_EXPR
173 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
174 {
175 fprintf (f, " -> ");
176 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
177 0);
178 }
179 fprintf (f, "\n");
180 }
181 else if (type == IPA_JF_CONST_MEMBER_PTR)
182 {
183 fprintf (f, "CONST MEMBER PTR: ");
184 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
185 fprintf (f, ", ");
186 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
187 fprintf (f, "\n");
188 }
189 else if (type == IPA_JF_PASS_THROUGH)
190 {
191 fprintf (f, "PASS THROUGH: ");
192 fprintf (f, "%d, op %s ",
193 jump_func->value.pass_through.formal_id,
194 tree_code_name[(int)
195 jump_func->value.pass_through.operation]);
196 if (jump_func->value.pass_through.operation != NOP_EXPR)
197 print_generic_expr (f,
198 jump_func->value.pass_through.operand, 0);
199 fprintf (f, "\n");
200 }
201 else if (type == IPA_JF_ANCESTOR)
202 {
203 fprintf (f, "ANCESTOR: ");
204 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
205 jump_func->value.ancestor.formal_id,
206 jump_func->value.ancestor.offset);
207 print_generic_expr (f, jump_func->value.ancestor.type, 0);
208 fprintf (f, "\n");
209 }
210 }
211 }
212
213
214 /* Print the jump functions of all arguments on all call graph edges going from
215 NODE to file F. */
216
217 void
218 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
219 {
220 struct cgraph_edge *cs;
221 int i;
222
223 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
224 for (cs = node->callees; cs; cs = cs->next_callee)
225 {
226 if (!ipa_edge_args_info_available_for_edge_p (cs))
227 continue;
228
229 fprintf (f, " callsite %s/%i -> %s/%i : \n",
230 xstrdup (cgraph_node_name (node)), node->uid,
231 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
232 ipa_print_node_jump_functions_for_edge (f, cs);
233 }
234
235 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
236 {
237 if (!ipa_edge_args_info_available_for_edge_p (cs))
238 continue;
239
240 if (cs->call_stmt)
241 {
242 fprintf (f, " indirect callsite %d for stmt ", i);
243 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
244 }
245 else
246 fprintf (f, " indirect callsite %d :\n", i);
247 ipa_print_node_jump_functions_for_edge (f, cs);
248
249 }
250 }
251
252 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
253
254 void
255 ipa_print_all_jump_functions (FILE *f)
256 {
257 struct cgraph_node *node;
258
259 fprintf (f, "\nJump functions:\n");
260 FOR_EACH_FUNCTION (node)
261 {
262 ipa_print_node_jump_functions (f, node);
263 }
264 }
265
266 /* Set JFUNC to be a known type jump function. */
267
268 static void
269 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
270 tree base_type, tree component_type)
271 {
272 jfunc->type = IPA_JF_KNOWN_TYPE;
273 jfunc->value.known_type.offset = offset,
274 jfunc->value.known_type.base_type = base_type;
275 jfunc->value.known_type.component_type = component_type;
276 }
277
278 /* Set JFUNC to be a constant jmp function. */
279
280 static void
281 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
282 {
283 jfunc->type = IPA_JF_CONST;
284 jfunc->value.constant = constant;
285 }
286
287 /* Set JFUNC to be a simple pass-through jump function. */
288 static void
289 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id)
290 {
291 jfunc->type = IPA_JF_PASS_THROUGH;
292 jfunc->value.pass_through.operand = NULL_TREE;
293 jfunc->value.pass_through.formal_id = formal_id;
294 jfunc->value.pass_through.operation = NOP_EXPR;
295 }
296
297 /* Set JFUNC to be an arithmetic pass through jump function. */
298
299 static void
300 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
301 tree operand, enum tree_code operation)
302 {
303 jfunc->type = IPA_JF_PASS_THROUGH;
304 jfunc->value.pass_through.operand = operand;
305 jfunc->value.pass_through.formal_id = formal_id;
306 jfunc->value.pass_through.operation = operation;
307 }
308
309 /* Set JFUNC to be an ancestor jump function. */
310
311 static void
312 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
313 tree type, int formal_id)
314 {
315 jfunc->type = IPA_JF_ANCESTOR;
316 jfunc->value.ancestor.formal_id = formal_id;
317 jfunc->value.ancestor.offset = offset;
318 jfunc->value.ancestor.type = type;
319 }
320
321 /* Simple function filling in a member pointer constant jump function (with PFN
322 and DELTA as the constant value) into JFUNC. */
323
324 static void
325 ipa_set_jf_member_ptr_cst (struct ipa_jump_func *jfunc,
326 tree pfn, tree delta)
327 {
328 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
329 jfunc->value.member_cst.pfn = pfn;
330 jfunc->value.member_cst.delta = delta;
331 }
332
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change (via the DATA pointer of
   walk_aliased_vdefs).  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
353
354 /* Return true if STMT can modify a virtual method table pointer.
355
356 This function makes special assumptions about both constructors and
357 destructors which are all the functions that are allowed to alter the VMT
358 pointers. It assumes that destructors begin with assignment into all VMT
359 pointers and that constructors essentially look in the following way:
360
361 1) The very first thing they do is that they call constructors of ancestor
362 sub-objects that have them.
363
364 2) Then VMT pointers of this and all its ancestors is set to new values
365 corresponding to the type corresponding to the constructor.
366
367 3) Only afterwards, other stuff such as constructor of member sub-objects
368 and the code written by the user is run. Only this may include calling
369 virtual functions, directly or indirectly.
370
371 There is no way to call a constructor of an ancestor sub-object in any
372 other way.
373
374 This means that we do not have to care whether constructors get the correct
375 type information because they will always change it (in fact, if we define
376 the type to be given by the VMT pointer, it is undefined).
377
378 The most important fact to derive from the above is that if, for some
379 statement in the section 3, we try to detect whether the dynamic type has
380 changed, we can safely ignore all calls as we examine the function body
381 backwards until we reach statements in section 2 because these calls cannot
382 be ancestor constructors or destructors (if the input is not bogus) and so
383 do not change the dynamic type (this holds true only for automatically
384 allocated objects but at the moment we devirtualize only these). We then
385 must detect that statements in section 2 change the dynamic type and can try
386 to derive the new type. That is enough and we can stop, we will never see
387 the calls into constructors of sub-objects in this code. Therefore we can
388 safely ignore all call statements that we traverse.
389 */
390
391 static bool
392 stmt_may_be_vtbl_ptr_store (gimple stmt)
393 {
394 if (is_gimple_call (stmt))
395 return false;
396 else if (is_gimple_assign (stmt))
397 {
398 tree lhs = gimple_assign_lhs (stmt);
399
400 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
401 {
402 if (flag_strict_aliasing
403 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
404 return false;
405
406 if (TREE_CODE (lhs) == COMPONENT_REF
407 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
408 return false;
409 /* In the future we might want to use get_base_ref_and_offset to find
410 if there is a field corresponding to the offset and if so, proceed
411 almost like if it was a component ref. */
412 }
413 }
414 return true;
415 }
416
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  TCI describes
   the tracked object and the offset of its VMT pointer.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  /* Only a plain single-rhs assignment can be a VMT pointer store.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  /* The store must write a virtual table field (DECL_VIRTUAL_P component)
     and the stored value must be the address of something.  */
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  /* ...specifically the address of a virtual table variable.  */
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  /* The access must be exactly at TCI->offset and exactly pointer-sized,
     otherwise it cannot be the VMT pointer we track.  */
  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  /* Match the base of the access against the tracked object: either a
     MEM_REF with identical base pointer and offset, or the same decl.  */
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  /* The dynamic type is the class that the virtual table belongs to.  */
  return DECL_CONTEXT (rhs);
}
460
461 /* Callback of walk_aliased_vdefs and a helper function for
462 detect_type_change to check whether a particular statement may modify
463 the virtual table pointer, and if possible also determine the new type of
464 the (sub-)object. It stores its result into DATA, which points to a
465 type_change_info structure. */
466
467 static bool
468 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
469 {
470 gimple stmt = SSA_NAME_DEF_STMT (vdef);
471 struct type_change_info *tci = (struct type_change_info *) data;
472
473 if (stmt_may_be_vtbl_ptr_store (stmt))
474 {
475 tree type;
476 type = extr_type_from_vtbl_ptr_store (stmt, tci);
477 if (tci->type_maybe_changed
478 && type != tci->known_current_type)
479 tci->multiple_types_encountered = true;
480 tci->known_current_type = type;
481 tci->type_maybe_changed = true;
482 return true;
483 }
484 else
485 return false;
486 }
487
488
489
490 /* Like detect_type_change but with extra argument COMP_TYPE which will become
491 the component type part of new JFUNC of dynamic type change is detected and
492 the new base type is identified. */
493
494 static bool
495 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
496 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
497 {
498 struct type_change_info tci;
499 ao_ref ao;
500
501 gcc_checking_assert (DECL_P (arg)
502 || TREE_CODE (arg) == MEM_REF
503 || handled_component_p (arg));
504 /* Const calls cannot call virtual methods through VMT and so type changes do
505 not matter. */
506 if (!flag_devirtualize || !gimple_vuse (call))
507 return false;
508
509 ao_ref_init (&ao, arg);
510 ao.base = base;
511 ao.offset = offset;
512 ao.size = POINTER_SIZE;
513 ao.max_size = ao.size;
514
515 tci.offset = offset;
516 tci.object = get_base_address (arg);
517 tci.known_current_type = NULL_TREE;
518 tci.type_maybe_changed = false;
519 tci.multiple_types_encountered = false;
520
521 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
522 &tci, NULL);
523 if (!tci.type_maybe_changed)
524 return false;
525
526 if (!tci.known_current_type
527 || tci.multiple_types_encountered
528 || offset != 0)
529 jfunc->type = IPA_JF_UNKNOWN;
530 else
531 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
532
533 return true;
534 }
535
536 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
537 looking for assignments to its virtual table pointer. If it is, return true
538 and fill in the jump function JFUNC with relevant type information or set it
539 to unknown. ARG is the object itself (not a pointer to it, unless
540 dereferenced). BASE is the base of the memory access as returned by
541 get_ref_base_and_extent, as is the offset. */
542
543 static bool
544 detect_type_change (tree arg, tree base, gimple call,
545 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
546 {
547 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
548 }
549
550 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
551 SSA name (its dereference will become the base and the offset is assumed to
552 be zero). */
553
554 static bool
555 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
556 {
557 tree comp_type;
558
559 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
560 if (!flag_devirtualize
561 || !POINTER_TYPE_P (TREE_TYPE (arg))
562 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
563 return false;
564
565 comp_type = TREE_TYPE (TREE_TYPE (arg));
566 arg = build2 (MEM_REF, ptr_type_node, arg,
567 build_int_cst (ptr_type_node, 0));
568
569 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
570 }
571
572 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
573 boolean variable pointed to by DATA. */
574
575 static bool
576 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
577 void *data)
578 {
579 bool *b = (bool *) data;
580 *b = true;
581 return true;
582 }
583
584 /* Return true if the formal parameter PARM might have been modified in this
585 function before reaching the statement STMT. PARM_AINFO is a pointer to a
586 structure containing temporary information about PARM. */
587
588 static bool
589 is_parm_modified_before_stmt (struct param_analysis_info *parm_ainfo,
590 gimple stmt, tree parm)
591 {
592 bool modified = false;
593 ao_ref refd;
594
595 if (parm_ainfo->modified)
596 return true;
597
598 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
599 ao_ref_init (&refd, parm);
600 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
601 &modified, &parm_ainfo->visited_statements);
602 if (modified)
603 {
604 parm_ainfo->modified = true;
605 return true;
606 }
607 return false;
608 }
609
610 /* If STMT is an assignment that loads a value from an parameter declaration,
611 return the index of the parameter in ipa_node_params which has not been
612 modified. Otherwise return -1. */
613
614 static int
615 load_from_unmodified_param (struct ipa_node_params *info,
616 struct param_analysis_info *parms_ainfo,
617 gimple stmt)
618 {
619 int index;
620 tree op1;
621
622 if (!gimple_assign_single_p (stmt))
623 return -1;
624
625 op1 = gimple_assign_rhs1 (stmt);
626 if (TREE_CODE (op1) != PARM_DECL)
627 return -1;
628
629 index = ipa_get_param_decl_index (info, op1);
630 if (index < 0
631 || is_parm_modified_before_stmt (&parms_ainfo[index], stmt, op1))
632 return -1;
633
634 return index;
635 }
636
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* Determine which formal parameter (if any) the rhs originates from.
     TC_SSA is the SSA name whose dynamic type may need checking below.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Case 2 above: a binary operation.  The second operand must be
	     an interprocedural invariant and, except for comparisons, the
	     operation may not change the type of the value.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt)
	       && !detect_type_change_ssa (tc_ssa, call, jfunc))
	/* Case 1 above: a plain copy whose dynamic type has not changed.  */
	ipa_set_jf_simple_pass_through (jfunc, index);
      return;
    }

  /* Case 3 above: taking the address of a component of a parameter.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  /* The base pointer must be an unmodified (default definition) formal.  */
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors, so
     verify the type has not changed before emitting an ancestor jump
     function.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index);
}
762
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  If case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  /* Strip the ADDR_EXPR; OBJ keeps the full referenced object for the
     caller.  */
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  /* The pointer being dereferenced must be an unmodified formal parameter
     (a default definition SSA name of a PARM_DECL).  */
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Fold the MEM_REF's own constant offset into the component offset.  */
  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
806
807
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the zero constant, the other (TMP) the casted
     address computed in the conditional block.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  /* TMP must be a non-parameter pointer to a class (RECORD_TYPE).  */
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  /* get_ancestor_addr_info guarantees PARM is a formal parameter.  */
  gcc_assert (index >= 0);

  /* The predecessor block must end with exactly the "parm != 0" test from
     the pattern above.  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must be the blocks of the matched pattern.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  /* Only emit the ancestor jump function if the dynamic type of the object
     has not changed before the call.  */
  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index);
}
886
887 /* Given OP which is passed as an actual argument to a called function,
888 determine if it is possible to construct a KNOWN_TYPE jump function for it
889 and if so, create one and store it to JFUNC. */
890
891 static void
892 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
893 gimple call)
894 {
895 HOST_WIDE_INT offset, size, max_size;
896 tree base;
897
898 if (!flag_devirtualize
899 || TREE_CODE (op) != ADDR_EXPR
900 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
901 return;
902
903 op = TREE_OPERAND (op, 0);
904 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
905 if (!DECL_P (base)
906 || max_size == -1
907 || max_size != size
908 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
909 || is_global_var (base))
910 return;
911
912 if (!TYPE_BINFO (TREE_TYPE (base))
913 || detect_type_change (op, base, call, jfunc, offset))
914 return;
915
916 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
917 }
918
919
920 /* Determine the jump functions of scalar arguments. Scalar means SSA names
921 and constants of a number of selected types. INFO is the ipa_node_params
922 structure associated with the caller, PARMS_AINFO describes state of
923 analysis with respect to individual formal parameters. ARGS is the
924 ipa_edge_args structure describing the callsite CALL which is the call
925 statement being examined.*/
926
927 static void
928 compute_scalar_jump_functions (struct ipa_node_params *info,
929 struct param_analysis_info *parms_ainfo,
930 struct ipa_edge_args *args,
931 gimple call)
932 {
933 tree arg;
934 unsigned num = 0;
935
936 for (num = 0; num < gimple_call_num_args (call); num++)
937 {
938 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
939 arg = gimple_call_arg (call, num);
940
941 if (is_gimple_ip_invariant (arg))
942 ipa_set_jf_constant (jfunc, arg);
943 else if (TREE_CODE (arg) == SSA_NAME)
944 {
945 if (SSA_NAME_IS_DEFAULT_DEF (arg))
946 {
947 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
948
949 if (index >= 0
950 && !detect_type_change_ssa (arg, call, jfunc))
951 ipa_set_jf_simple_pass_through (jfunc, index);
952 }
953 else
954 {
955 gimple stmt = SSA_NAME_DEF_STMT (arg);
956 if (is_gimple_assign (stmt))
957 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
958 call, stmt, arg);
959 else if (gimple_code (stmt) == GIMPLE_PHI)
960 compute_complex_ancestor_jump_func (info, jfunc, call, stmt);
961 }
962 }
963 else
964 compute_known_type_jump_func (arg, jfunc, call);
965 }
966 }
967
968 /* Inspect the given TYPE and return true iff it has the same structure (the
969 same number of fields of the same types) as a C++ member pointer. If
970 METHOD_PTR and DELTA are non-NULL, store the trees representing the
971 corresponding fields there. */
972
973 static bool
974 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
975 {
976 tree fld;
977
978 if (TREE_CODE (type) != RECORD_TYPE)
979 return false;
980
981 fld = TYPE_FIELDS (type);
982 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
983 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
984 return false;
985
986 if (method_ptr)
987 *method_ptr = fld;
988
989 fld = DECL_CHAIN (fld);
990 if (!fld || INTEGRAL_TYPE_P (fld))
991 return false;
992 if (delta)
993 *delta = fld;
994
995 if (DECL_CHAIN (fld))
996 return false;
997
998 return true;
999 }
1000
1001 /* Go through arguments of the CALL and for every one that looks like a member
1002 pointer, check whether it can be safely declared pass-through and if so,
1003 mark that to the corresponding item of jump FUNCTIONS. Return true iff
1004 there are non-pass-through member pointers within the arguments. INFO
1005 describes formal parameters of the caller. PARMS_INFO is a pointer to a
1006 vector containing intermediate information about each formal parameter. */
1007
static bool
compute_pass_through_member_ptrs (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_edge_args *args,
				  gimple call)
{
  bool undecided_members = false;
  unsigned num;
  tree arg;

  /* Examine every actual argument whose type has the shape of a C++ member
     pointer.  */
  for (num = 0; num < gimple_call_num_args (call); num++)
    {
      arg = gimple_call_arg (call, num);

      if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
	{
	  if (TREE_CODE (arg) == PARM_DECL)
	    {
	      int index = ipa_get_param_decl_index (info, arg);

	      gcc_assert (index >=0);
	      /* A formal parameter may only be declared a simple
		 pass-through if it is not modified between function entry
		 and the call site.  */
	      if (!is_parm_modified_before_stmt (&parms_ainfo[index], call,
						 arg))
		{
		  struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args,
								       num);
		  ipa_set_jf_simple_pass_through (jfunc, index);
		}
	      else
		undecided_members = true;
	    }
	  else
	    /* Not a PARM_DECL: leave the jump function unknown for now,
	       the caller may still determine a constant for it.  */
	    undecided_members = true;
	}
    }

  /* TRUE means some member-pointer arguments remain undecided and the
     caller should try to find constant member pointers for them.  */
  return undecided_members;
}
1046
1047 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1048 return the rhs of its defining statement. */
1049
1050 static inline tree
1051 get_ssa_def_if_simple_copy (tree rhs)
1052 {
1053 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1054 {
1055 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1056
1057 if (gimple_assign_single_p (def_stmt))
1058 rhs = gimple_assign_rhs1 (def_stmt);
1059 else
1060 break;
1061 }
1062 return rhs;
1063 }
1064
1065 /* Traverse statements from CALL backwards, scanning whether the argument ARG
1066 which is a member pointer is filled in with constant values. If it is, fill
1067 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
1068 fields of the record type of the member pointer. To give an example, we
1069 look for a pattern looking like the following:
1070
1071 D.2515.__pfn ={v} printStuff;
1072 D.2515.__delta ={v} 0;
1073 i_1 = doprinting (D.2515); */
1074
1075 static void
1076 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
1077 tree delta_field, struct ipa_jump_func *jfunc)
1078 {
1079 gimple_stmt_iterator gsi;
1080 tree method = NULL_TREE;
1081 tree delta = NULL_TREE;
1082
1083 gsi = gsi_for_stmt (call);
1084
1085 gsi_prev (&gsi);
1086 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1087 {
1088 gimple stmt = gsi_stmt (gsi);
1089 tree lhs, rhs, fld;
1090
1091 if (!stmt_may_clobber_ref_p (stmt, arg))
1092 continue;
1093 if (!gimple_assign_single_p (stmt))
1094 return;
1095
1096 lhs = gimple_assign_lhs (stmt);
1097 rhs = gimple_assign_rhs1 (stmt);
1098
1099 if (TREE_CODE (lhs) != COMPONENT_REF
1100 || TREE_OPERAND (lhs, 0) != arg)
1101 return;
1102
1103 fld = TREE_OPERAND (lhs, 1);
1104 if (!method && fld == method_field)
1105 {
1106 rhs = get_ssa_def_if_simple_copy (rhs);
1107 if (TREE_CODE (rhs) == ADDR_EXPR
1108 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
1109 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
1110 {
1111 method = TREE_OPERAND (rhs, 0);
1112 if (delta)
1113 {
1114 ipa_set_jf_member_ptr_cst (jfunc, rhs, delta);
1115 return;
1116 }
1117 }
1118 else
1119 return;
1120 }
1121
1122 if (!delta && fld == delta_field)
1123 {
1124 rhs = get_ssa_def_if_simple_copy (rhs);
1125 if (TREE_CODE (rhs) == INTEGER_CST)
1126 {
1127 delta = rhs;
1128 if (method)
1129 {
1130 ipa_set_jf_member_ptr_cst (jfunc, rhs, delta);
1131 return;
1132 }
1133 }
1134 else
1135 return;
1136 }
1137 }
1138
1139 return;
1140 }
1141
1142 /* Go through the arguments of the CALL and for every member pointer within
1143 tries determine whether it is a constant. If it is, create a corresponding
1144 constant jump function in FUNCTIONS which is an array of jump functions
1145 associated with the call. */
1146
1147 static void
1148 compute_cst_member_ptr_arguments (struct ipa_edge_args *args,
1149 gimple call)
1150 {
1151 unsigned num;
1152 tree arg, method_field, delta_field;
1153
1154 for (num = 0; num < gimple_call_num_args (call); num++)
1155 {
1156 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
1157 arg = gimple_call_arg (call, num);
1158
1159 if (jfunc->type == IPA_JF_UNKNOWN
1160 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
1161 &delta_field))
1162 determine_cst_member_ptr (call, arg, method_field, delta_field, jfunc);
1163 }
1164 }
1165
1166 /* Compute jump function for all arguments of callsite CS and insert the
1167 information in the jump_functions array in the ipa_edge_args corresponding
1168 to this callsite. */
1169
static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int arg_num = gimple_call_num_args (call);

  /* Nothing to do for argument-less calls or if the jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);

  /* We will deal with constants and SSA scalars first:  */
  compute_scalar_jump_functions (info, parms_ainfo, args, call);

  /* Let's check whether there are any potential member pointers and if so,
     whether we can determine their functions as pass_through.  */
  if (!compute_pass_through_member_ptrs (info, parms_ainfo, args, call))
    return;

  /* Finally, let's check whether we actually pass a new constant member
     pointer here...  */
  compute_cst_member_ptr_arguments (args, call);
}
1195
1196 /* Compute jump functions for all edges - both direct and indirect - outgoing
1197 from NODE. Also count the actual arguments in the process. */
1198
1199 static void
1200 ipa_compute_jump_functions (struct cgraph_node *node,
1201 struct param_analysis_info *parms_ainfo)
1202 {
1203 struct cgraph_edge *cs;
1204
1205 for (cs = node->callees; cs; cs = cs->next_callee)
1206 {
1207 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1208 NULL);
1209 /* We do not need to bother analyzing calls to unknown
1210 functions unless they may become known during lto/whopr. */
1211 if (!callee->analyzed && !flag_lto)
1212 continue;
1213 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1214 }
1215
1216 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1217 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1218 }
1219
1220 /* If RHS looks like a rhs of a statement loading pfn from a member
1221 pointer formal parameter, return the parameter, otherwise return
1222 NULL. If USE_DELTA, then we look for a use of the delta field
1223 rather than the pfn. */
1224
static tree
ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
{
  tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;

  /* Peel off an optional COMPONENT_REF, remembering which field was
     accessed.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  /* The load must be from memory addressed directly by a PARM_DECL whose
     type looks like a member pointer.  */
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;

  /* Second MEM_REF operand is the constant byte offset of the access.  */
  ref_offset = TREE_OPERAND (rhs, 1);

  if (ref_field)
    {
      /* Field access: the MEM_REF offset must be zero and the field must
	 be the one we are looking for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;

      if (use_delta)
	fld = delta_field;
      else
	fld = ptr_field;

      return ref_field == fld ? rec : NULL_TREE;
    }

  /* Raw MEM_REF access: compare the offset against the byte position of
     the requested field instead.  */
  if (use_delta)
    fld_offset = byte_position (delta_field);
  else
    fld_offset = byte_position (ptr_field);

  return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
}
1269
1270 /* If STMT looks like a statement loading a value from a member pointer formal
1271 parameter, this function returns that parameter. */
1272
1273 static tree
1274 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1275 {
1276 tree rhs;
1277
1278 if (!gimple_assign_single_p (stmt))
1279 return NULL_TREE;
1280
1281 rhs = gimple_assign_rhs1 (stmt);
1282 return ipa_get_member_ptr_load_param (rhs, use_delta);
1283 }
1284
1285 /* Returns true iff T is an SSA_NAME defined by a statement. */
1286
1287 static bool
1288 ipa_is_ssa_with_stmt_def (tree t)
1289 {
1290 if (TREE_CODE (t) == SSA_NAME
1291 && !SSA_NAME_IS_DEFAULT_DEF (t))
1292 return true;
1293 else
1294 return false;
1295 }
1296
1297 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1298 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1299 indirect call graph edge. */
1300
static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  /* The indirect edge for STMT already exists in the call graph; look it
     up and record that its target comes from formal parameter
     PARAM_INDEX.  Ancestor offset and the polymorphic flag are reset here;
     callers that handle virtual calls fill them in afterwards.  */
  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->anc_offset = 0;
  cs->indirect_info->polymorphic = 0;
  return cs;
}
1312
1313 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1314 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1315 intermediate information about each formal parameter. Currently it checks
1316 whether the call calls a pointer that is a formal parameter and if so, the
1317 parameter is marked with the called flag and an indirect call graph edge
1318 describing the call is created. This is very simple for ordinary pointers
1319 represented in SSA but not-so-nice when it comes to member pointers. The
1320 ugly part of this function does nothing more than trying to match the
1321 pattern of such a call. An example of such a pattern is the gimple dump
1322 below, the call is on the last line:
1323
1324 <bb 2>:
1325 f$__delta_5 = f.__delta;
1326 f$__pfn_24 = f.__pfn;
1327
1328 or
1329 <bb 2>:
1330 f$__delta_5 = MEM[(struct *)&f];
1331 f$__pfn_24 = MEM[(struct *)&f + 4B];
1332
1333 and a few lines below:
1334
1335 <bb 5>
1336 D.2496_3 = (int) f$__pfn_24;
1337 D.2497_4 = D.2496_3 & 1;
1338 if (D.2497_4 != 0)
1339 goto <bb 3>;
1340 else
1341 goto <bb 4>;
1342
1343 <bb 6>:
1344 D.2500_7 = (unsigned int) f$__delta_5;
1345 D.2501_8 = &S + D.2500_7;
1346 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1347 D.2503_10 = *D.2502_9;
1348 D.2504_12 = f$__pfn_24 + -1;
1349 D.2505_13 = (unsigned int) D.2504_12;
1350 D.2506_14 = D.2503_10 + D.2505_13;
1351 D.2507_15 = *D.2506_14;
1352 iftmp.11_16 = (String:: *) D.2507_15;
1353
1354 <bb 7>:
1355 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1356 D.2500_19 = (unsigned int) f$__delta_5;
1357 D.2508_20 = &S + D.2500_19;
1358 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1359
1360 Such patterns are results of simple calls to a member pointer:
1361
1362 int doprinting (int (MyString::* f)(int) const)
1363 {
1364 MyString S ("somestring");
1365
1366 return (S.*f)(4);
1367 }
1368 */
1369
static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_ainfo,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;

  /* The easy case: TARGET is the unmodified value of a formal parameter
     (a default SSA definition).  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */

  if (!POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* The target must be merged by a two-argument PHI joining the virtual
     and the non-virtual paths (see the example in the comment above).  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_code (def) != GIMPLE_PHI)
    return;

  if (gimple_phi_num_args (def) != 2)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
    {
      /* Exactly one PHI argument may be the direct pfn load.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  /* The condition must be (x & 1) != 0 on an SSA name.  */
  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Step over an optional conversion feeding the BIT_AND.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* Depending on the target ABI the virtual bit lives either in the pfn
     or in the delta field, so load the matching one.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta));

  /* Both loads must come from the very same parameter.  */
  if (rec != rec2)
    return;

  /* Only record the call if the member pointer parameter is not modified
     before the call statement.  */
  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0 && !is_parm_modified_before_stmt (&parms_ainfo[index],
						   call, rec))
    ipa_note_param_call (node, index, call);

  return;
}
1492
1493 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1494 object referenced in the expression is a formal parameter of the caller
1495 (described by INFO), create a call note for the statement. */
1496
static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
			       struct ipa_node_params *info, gimple call,
			       tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* OBJ is the unmodified value of a formal parameter.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* A detected dynamic type change makes devirtualization based on the
	 incoming type unsafe.  */
      if (detect_type_change_ssa (obj, call, &jfunc))
	return;
    }
  else
    {
      /* Otherwise OBJ may be the address of an ancestor (base sub-object)
	 of a formal parameter; EXPR describes that address computation and
	 ANC_OFFSET the offset of the base within the parameter.  */
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
	return;
    }

  /* Record the details needed for later devirtualization on the indirect
     edge.  */
  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->anc_offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
1548
1549 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1550 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1551 containing intermediate information about each formal parameter. */
1552
1553 static void
1554 ipa_analyze_call_uses (struct cgraph_node *node,
1555 struct ipa_node_params *info,
1556 struct param_analysis_info *parms_ainfo, gimple call)
1557 {
1558 tree target = gimple_call_fn (call);
1559
1560 if (!target)
1561 return;
1562 if (TREE_CODE (target) == SSA_NAME)
1563 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1564 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1565 ipa_analyze_virtual_call_uses (node, info, call, target);
1566 }
1567
1568
1569 /* Analyze the call statement STMT with respect to formal parameters (described
1570 in INFO) of caller given by NODE. Currently it only checks whether formal
1571 parameters are called. PARMS_AINFO is a pointer to a vector containing
1572 intermediate information about each formal parameter. */
1573
1574 static void
1575 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1576 struct param_analysis_info *parms_ainfo, gimple stmt)
1577 {
1578 if (is_gimple_call (stmt))
1579 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1580 }
1581
1582 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1583 If OP is a parameter declaration, mark it as used in the info structure
1584 passed in DATA. */
1585
1586 static bool
1587 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1588 tree op, void *data)
1589 {
1590 struct ipa_node_params *info = (struct ipa_node_params *) data;
1591
1592 op = get_base_address (op);
1593 if (op
1594 && TREE_CODE (op) == PARM_DECL)
1595 {
1596 int index = ipa_get_param_decl_index (info, op);
1597 gcc_assert (index >= 0);
1598 ipa_set_param_used (info, index, true);
1599 }
1600
1601 return false;
1602 }
1603
1604 /* Scan the function body of NODE and inspect the uses of formal parameters.
1605 Store the findings in various structures of the associated ipa_node_params
1606 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1607 vector containing intermediate information about each formal parameter. */
1608
static void
ipa_analyze_params_uses (struct cgraph_node *node,
			 struct param_analysis_info *parms_ainfo)
{
  tree decl = node->symbol.decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  /* Nothing to do for parameter-less functions or when the analysis has
     already been carried out.  */
  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      tree ddef;
      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm)
	  && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
				      parm)) != NULL_TREE
	  && !has_zero_uses (ddef))
	ipa_set_param_used (info, i, true);
    }

  /* Walk all statements (and all PHI node arguments) looking both for
     indirect calls through parameters and for any load, store or
     address-taken reference to a parameter.  */
  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* Debug statements must not influence the used flags.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
	  walk_stmt_load_store_addr_ops (stmt, info,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis);
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis);
    }

  info->uses_analysis_done = 1;
}
1661
1662 /* Initialize the array describing properties of of formal parameters
1663 of NODE, analyze their uses and compute jump functions associated
1664 with actual arguments of calls from within NODE. */
1665
void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_ainfo;
  int i, param_count;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  /* The analysis below needs NODE's function to be the current one.  */
  push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
  current_function_decl = node->symbol.decl;
  ipa_initialize_node_params (node);

  /* Allocate the zero-initialized per-parameter scratch info on the
     stack; it only lives for the duration of this analysis.  */
  param_count = ipa_get_param_count (info);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_ainfo);
  ipa_compute_jump_functions (node, parms_ainfo);

  /* Release the visited-statement bitmaps the analyses may have built.  */
  for (i = 0; i < param_count; i++)
    if (parms_ainfo[i].visited_statements)
      BITMAP_FREE (parms_ainfo[i].visited_statements);

  current_function_decl = NULL;
  pop_cfun ();
}
1694
1695
1696 /* Update the jump function DST when the call graph edge corresponding to SRC is
1697 is being inlined, knowing that DST is of type ancestor and src of known
1698 type. */
1699
static void
combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
				     struct ipa_jump_func *dst)
{
  HOST_WIDE_INT combined_offset;
  tree combined_type;

  /* Walking from the known-type base by the ancestor offset yields the
     resulting known type at the combined offset.  */
  combined_offset = ipa_get_jf_known_type_offset (src)
    + ipa_get_jf_ancestor_offset (dst);
  combined_type = ipa_get_jf_ancestor_type (dst);

  /* DST is overwritten in place with the combined known-type jump
     function.  */
  ipa_set_jf_known_type (dst, combined_offset,
			 ipa_get_jf_known_type_base_type (src),
			 combined_type);
}
1715
1716 /* Update the jump functions associated with call graph edge E when the call
1717 graph edge CS is being inlined, assuming that E->caller is already (possibly
1718 indirectly) inlined into CS->callee and that E has not been inlined. */
1719
static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  /* Compose each jump function of E with the corresponding jump function
     of the inlined edge CS.  */
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    /* A simple pass-through just redirects the ancestor to the
	       outer formal parameter.  */
	    dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* Two ancestor walks compose by adding their offsets.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      /* A simple pass-through is replaced by whatever the outer
		 edge passes in that position.  */
	      src = ipa_get_ith_jump_func (top,
					   dst->value.pass_through.formal_id);
	      *dst = *src;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
1778
1779 /* If TARGET is an addr_expr of a function declaration, make it the destination
1780 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1781
struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;

  /* Accept either a FUNCTION_DECL or its address.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    return NULL;
  callee = cgraph_get_node (target);
  if (!callee)
    return NULL;
  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  cgraph_make_edge_direct (ie, callee);
  if (dump_file)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
	       xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  /* NOTE(review): the result of looking through aliases/thunks is not used
     here; the assignment only appears to matter for its side effects, if
     any -- confirm.  */
  callee = cgraph_function_or_thunk_node (callee, NULL);

  return ie;
}
1817
1818 /* Try to find a destination for indirect edge IE that corresponds to a simple
1819 call or a call of a member function pointer and where the destination is a
1820 pointer formal parameter described by jump function JFUNC. If it can be
1821 determined, return the newly direct edge, otherwise return NULL. */
1822
1823 static struct cgraph_edge *
1824 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1825 struct ipa_jump_func *jfunc)
1826 {
1827 tree target;
1828
1829 if (jfunc->type == IPA_JF_CONST)
1830 target = ipa_get_jf_constant (jfunc);
1831 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1832 target = ipa_get_jf_member_ptr_pfn (jfunc);
1833 else
1834 return NULL;
1835
1836 return ipa_make_edge_direct_to_target (ie, target);
1837 }
1838
1839 /* Try to find a destination for indirect edge IE that corresponds to a
1840 virtual call based on a formal parameter which is described by jump
1841 function JFUNC and if it can be determined, make it direct and return the
1842 direct edge. Otherwise, return NULL. */
1843
static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc)
{
  tree binfo, target;

  if (jfunc->type != IPA_JF_KNOWN_TYPE)
    return NULL;

  /* Find the BINFO of the sub-object the call actually dispatches on: the
     known-type offset plus any ancestor offset accumulated on the edge.  */
  binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
  gcc_checking_assert (binfo);
  binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
			       + ie->indirect_info->anc_offset,
			       ie->indirect_info->otr_type);
  if (binfo)
    /* Look the target up in the virtual table of that BINFO.  */
    target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
					       binfo);
  else
    return NULL;

  if (target)
    return ipa_make_edge_direct_to_target (ie, target);
  else
    return NULL;
}
1869
1870 /* Update the param called notes associated with NODE when CS is being inlined,
1871 assuming NODE is (potentially indirectly) inlined into CS->callee.
1872 Moreover, if the callee is discovered to be constant, create a new cgraph
1873 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1874 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1875
static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      VEC (cgraph_edge_p, heap) **new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);

  /* NEXT_IE is fetched up front because making an edge direct removes it
     from the indirect list.  */
  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;

      next_ie = ie->next_callee;

      /* -1 means the target is not based on a formal parameter.  */
      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      /* Translate the parameter index through the jump function of the
	 inlined edge CS.  */
      jfunc = ipa_get_ith_jump_func (top, ici->param_index);
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	  ici->anc_offset += ipa_get_jf_ancestor_offset (jfunc);
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;

      if (!flag_indirect_inlining)
	continue;

      /* Try to resolve the call to a direct target right away.  */
      if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);

      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
						   new_direct_edge->callee->symbol.decl);
	  if (new_edges)
	    {
	      VEC_safe_push (cgraph_edge_p, heap, *new_edges,
			     new_direct_edge);
	      /* Pushing may have reallocated the args vector; refresh
		 TOP.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
    }

  return res;
}
1945
1946 /* Recursively traverse subtree of NODE (including node) made of inlined
1947 cgraph_edges when CS has been inlined and invoke
1948 update_indirect_edges_after_inlining on all nodes and
1949 update_jump_functions_after_inlining on all non-inlined edges that lead out
1950 of this subtree. Newly discovered indirect edges will be added to
1951 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1952 created. */
1953
static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   VEC (cgraph_edge_p, heap) **new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  /* Recurse into edges that are themselves inlined; for edges that lead
     out of the inlined subtree only update their jump functions.  */
  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
1974
1975 /* Update jump functions and call note functions on inlining the call site CS.
1976 CS is expected to lead to a node already cloned by
1977 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1978 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
1979 created. */
1980
bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   VEC (cgraph_edge_p, heap) **new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_vector)
    return false;
  gcc_assert (ipa_edge_args_vector);

  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  /* We do not keep jump functions of inlined edges up to date. Better to free
     them so we do not access them accidentally. */
  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
  return changed;
}
1999
2000 /* Frees all dynamically allocated structures that the argument info points
2001 to. */
2002
2003 void
2004 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2005 {
2006 if (args->jump_functions)
2007 ggc_free (args->jump_functions);
2008
2009 memset (args, 0, sizeof (*args));
2010 }
2011
2012 /* Free all ipa_edge structures. */
2013
void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  /* Release the substructures of every element before freeing the vector
     itself.  */
  FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
  ipa_edge_args_vector = NULL;
}
2026
2027 /* Frees all dynamically allocated structures that the param info points
2028 to. */
2029
void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  /* The descriptor and known-value vectors are heap-allocated.  */
  VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their
     allocation pool.  */
  VEC_free (tree, heap, info->known_vals);
  /* Clear the structure so stale pointers cannot be reused.  */
  memset (info, 0, sizeof (*info));
}
2040
2041 /* Free all ipa_node_params structures. */
2042
2043 void
2044 ipa_free_all_node_params (void)
2045 {
2046 int i;
2047 struct ipa_node_params *info;
2048
2049 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2050 ipa_free_node_params_substructures (info);
2051
2052 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2053 ipa_node_params_vector = NULL;
2054 }
2055
2056 /* Hook that is called by cgraph.c when an edge is removed. */
2057
2058 static void
2059 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2060 {
2061 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2062 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2063 <= (unsigned)cs->uid)
2064 return;
2065 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2066 }
2067
2068 /* Hook that is called by cgraph.c when a node is removed. */
2069
2070 static void
2071 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2072 {
2073 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2074 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2075 <= (unsigned)node->uid)
2076 return;
2077 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2078 }
2079
 /* Hook that is called by cgraph.c when an edge is duplicated.  */
2081
2082 static void
2083 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2084 __attribute__((unused)) void *data)
2085 {
2086 struct ipa_edge_args *old_args, *new_args;
2087
2088 ipa_check_create_edge_args ();
2089
2090 old_args = IPA_EDGE_REF (src);
2091 new_args = IPA_EDGE_REF (dst);
2092
2093 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2094 old_args->jump_functions);
2095 }
2096
2097 /* Hook that is called by cgraph.c when a node is duplicated. */
2098
2099 static void
2100 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2101 ATTRIBUTE_UNUSED void *data)
2102 {
2103 struct ipa_node_params *old_info, *new_info;
2104
2105 ipa_check_create_node_params ();
2106 old_info = IPA_NODE_REF (src);
2107 new_info = IPA_NODE_REF (dst);
2108
2109 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2110 old_info->descriptors);
2111 new_info->lattices = NULL;
2112 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2113
2114 new_info->uses_analysis_done = old_info->uses_analysis_done;
2115 new_info->node_enqueued = old_info->node_enqueued;
2116 }
2117
2118
2119 /* Analyze newly added function into callgraph. */
2120
static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* Run the per-node IPA analysis on NODE, which was inserted into the
     callgraph after the analysis phase already ran.  */
  ipa_analyze_node (node);
}
2126
2127 /* Register our cgraph hooks if they are not already there. */
2128
2129 void
2130 ipa_register_cgraph_hooks (void)
2131 {
2132 if (!edge_removal_hook_holder)
2133 edge_removal_hook_holder =
2134 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2135 if (!node_removal_hook_holder)
2136 node_removal_hook_holder =
2137 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2138 if (!edge_duplication_hook_holder)
2139 edge_duplication_hook_holder =
2140 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2141 if (!node_duplication_hook_holder)
2142 node_duplication_hook_holder =
2143 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2144 function_insertion_hook_holder =
2145 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2146 }
2147
 /* Unregister all our cgraph hooks and clear their holders.  */
2149
static void
ipa_unregister_cgraph_hooks (void)
{
  /* Remove each hook and clear its holder so that
     ipa_register_cgraph_hooks can register them afresh later.  */
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
2164
2165 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2166 longer needed after ipa-cp. */
2167
2168 void
2169 ipa_free_all_structures_after_ipa_cp (void)
2170 {
2171 if (!optimize)
2172 {
2173 ipa_free_all_edge_args ();
2174 ipa_free_all_node_params ();
2175 free_alloc_pool (ipcp_sources_pool);
2176 free_alloc_pool (ipcp_values_pool);
2177 ipa_unregister_cgraph_hooks ();
2178 }
2179 }
2180
2181 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2182 longer needed after indirect inlining. */
2183
void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  /* The IPA-CP pools may not have been created; guard against NULL.  */
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
}
2195
 /* Print the parameter descriptors of function NODE to F.  */
2198
void
ipa_print_node_params (FILE * f, struct cgraph_node *node)
{
  int i, count;
  tree temp;
  struct ipa_node_params *info;

  /* Only analyzed nodes have parameter information to print.  */
  if (!node->analyzed)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s parameter descriptors:\n",
	   cgraph_node_name (node));
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      temp = ipa_get_param (info, i);
      /* Print the parameter's source-level name when it has one.  */
      if (TREE_CODE (temp) == PARM_DECL)
	fprintf (f, " param %d : %s", i,
		 (DECL_NAME (temp)
		  ? (*lang_hooks.decl_printable_name) (temp, 2)
		  : "(unnamed)"));
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      fprintf (f, "\n");
    }
}
2225
2226 /* Print ipa_tree_map data structures of all functions in the
2227 callgraph to F. */
2228
void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  /* Dump the per-node parameter descriptors for every function.  */
  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
2238
2239 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2240
2241 VEC(tree, heap) *
2242 ipa_get_vector_of_formal_parms (tree fndecl)
2243 {
2244 VEC(tree, heap) *args;
2245 int count;
2246 tree parm;
2247
2248 count = count_formal_params (fndecl);
2249 args = VEC_alloc (tree, heap, count);
2250 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2251 VEC_quick_push (tree, args, parm);
2252
2253 return args;
2254 }
2255
2256 /* Return a heap allocated vector containing types of formal parameters of
2257 function type FNTYPE. */
2258
2259 static inline VEC(tree, heap) *
2260 get_vector_of_formal_parm_types (tree fntype)
2261 {
2262 VEC(tree, heap) *types;
2263 int count = 0;
2264 tree t;
2265
2266 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2267 count++;
2268
2269 types = VEC_alloc (tree, heap, count);
2270 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2271 VEC_quick_push (tree, types, TREE_VALUE (t));
2272
2273 return types;
2274 }
2275
2276 /* Modify the function declaration FNDECL and its type according to the plan in
2277 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2278 to reflect the actual parameters being modified which are determined by the
2279 base_index field. */
2280
void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  VEC(tree, heap) *oparms, *otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      /* LAST_PARM_VOID is true when the type list ends in
	 void_type_node, i.e. the function is not variadic.  */
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
      else
	gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
    }
  else
    {
      last_parm_void = false;
      otypes = NULL;
    }

  /* Rebuild DECL_ARGUMENTS (chained through LINK) and, when
     CARE_FOR_TYPES, the argument type list (accumulated in reverse in
     NEW_ARG_TYPES) according to ADJUSTMENTS.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      parm = VEC_index (tree, oparms, adj->base_index);
      adj->base = parm;

      if (adj->copy_param)
	{
	  /* Keep the original parameter unchanged.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
							     adj->base_index),
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  /* Synthesize a replacement parameter (removed parameters are
	     simply skipped).  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      /* NEW_ARG_TYPES was built in reverse; restore order and re-attach
	 the terminating void for non-variadic functions.  */
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
	  && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  if (otypes)
    VEC_free (tree, heap, otypes);
  VEC_free (tree, heap, oparms);
}
2433
2434 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2435 If this is a directly recursive call, CS must be NULL. Otherwise it must
2436 contain the corresponding call graph edge. */
2437
void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  VEC(tree, heap) *vargs;
  VEC(tree, gc) **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  tree callee_decl;
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  vargs = VEC_alloc (tree, heap, len);
  /* For a directly recursive call (CS == NULL) the callee is taken from
     the statement itself.  */
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;

  gsi = gsi_for_stmt (stmt);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->copy_param)
	{
	  /* Unchanged parameter: pass the original argument through.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  VEC_quick_push (tree, vargs, arg);
	}
      else if (!adj->remove_param)
	{
	  tree expr, base, off;
	  location_t loc;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      /* Fold the base's own offset (if any) into OFF so the
		 resulting MEM_REF addresses the right piece.  */
	      HOST_WIDE_INT base_offset;
	      tree prev_base;

	      if (TREE_CODE (base) == ADDR_EXPR)
		base = TREE_OPERAND (base, 0);
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      /* Weaken the access type's alignment to what BASE + OFF can
		 actually guarantee.  */
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      get_pointer_alignment_1 (base, &align, &misalign);
	      misalign += (double_int_sext (tree_to_double_int (off),
					    TYPE_PRECISION (TREE_TYPE (off))).low
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	    }

	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  VEC_quick_push (tree, vargs, expr);
	}
      if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
	{
	  /* Preserve debug information for the transformed or removed
	     parameter by binding a DEBUG_EXPR_DECL to the original
	     argument value.  */
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* DEBUG_ARGS holds (origin, debug decl) pairs; reuse an
	     existing entry for ORIGIN if there is one.  */
	  for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = VEC_index (tree, *debug_args, ix + 1);
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      VEC_safe_push (tree, gc, *debug_args, origin);
	      VEC_safe_push (tree, gc, *debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
					      stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  /* Build the replacement call, carrying over the LHS, block, location,
     static chain and call flags of the original statement.  */
  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
2628
2629 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2630
2631 static bool
2632 index_in_adjustments_multiple_times_p (int base_index,
2633 ipa_parm_adjustment_vec adjustments)
2634 {
2635 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2636 bool one = false;
2637
2638 for (i = 0; i < len; i++)
2639 {
2640 struct ipa_parm_adjustment *adj;
2641 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2642
2643 if (adj->base_index == base_index)
2644 {
2645 if (one)
2646 return true;
2647 else
2648 one = true;
2649 }
2650 }
2651 return false;
2652 }
2653
2654
2655 /* Return adjustments that should have the same effect on function parameters
2656 and call arguments as if they were first changed according to adjustments in
2657 INNER and then by adjustments in OUTER. */
2658
ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
  int inlen = VEC_length (ipa_parm_adjustment_t, inner);
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP collects the inner adjustments that are not removals; OUTER's
     base_index values are indices into this filtered vector.  */
  tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = VEC_index (ipa_parm_adjustment_t, inner, i);

      if (n->remove_param)
	removals++;
      else
	VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
    }

  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment *r;
      struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
						   outer, i);
      struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
						  out->base_index);

      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* Emit at most one removal per original parameter.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (r, 0, sizeof (*r));
	      r->remove_param = true;
	    }
	  continue;
	}

      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
      memset (r, 0, sizeof (*r));
      r->base_index = in->base_index;
      r->type = out->type;

      /* FIXME: Create nonlocal value too.  */

      /* Offsets of two composed non-copy adjustments accumulate.  */
      if (in->copy_param && out->copy_param)
	r->copy_param = true;
      else if (in->copy_param)
	r->offset = out->offset;
      else if (out->copy_param)
	r->offset = in->offset;
      else
	r->offset = in->offset + out->offset;
    }

  /* Re-append the inner removals, which OUTER could not reference.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
						 inner, i);

      if (n->remove_param)
	VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
    }

  VEC_free (ipa_parm_adjustment_t, heap, tmp);
  return adjustments;
}
2730
2731 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
2732 friendly way, assuming they are meant to be applied to FNDECL. */
2733
void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  bool first = true;
  VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (!first)
	fprintf (file, " ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
      /* Dump whichever of the optional fields are set.  */
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->reduction)
	{
	  fprintf (file, ", reduction: ");
	  print_generic_expr (file, adj->reduction, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      /* copy_param / remove_param / offset are mutually exclusive.  */
      if (adj->copy_param)
	fprintf (file, ", copy_param");
      else if (adj->remove_param)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  VEC_free (tree, heap, parms);
}
2784
2785 /* Stream out jump function JUMP_FUNC to OB. */
2786
static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  /* The layout written here must be kept in sync with
     ipa_read_jump_function.  */
  streamer_write_uhwi (ob, jump_func->type);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      stream_write_tree (ob, jump_func->value.constant, true);
      break;
    case IPA_JF_PASS_THROUGH:
      stream_write_tree (ob, jump_func->value.pass_through.operand, true);
      streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      stream_write_tree (ob, jump_func->value.member_cst.pfn, true);
      stream_write_tree (ob, jump_func->value.member_cst.delta, false);
      break;
    }
}
2821
2822 /* Read in jump function JUMP_FUNC from IB. */
2823
static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct data_in *data_in)
{
  /* The layout read here must be kept in sync with
     ipa_write_jump_function.  */
  jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      jump_func->value.known_type.offset = streamer_read_uhwi (ib);
      jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
      jump_func->value.known_type.component_type = stream_read_tree (ib,
								     data_in);
      break;
    case IPA_JF_CONST:
      jump_func->value.constant = stream_read_tree (ib, data_in);
      break;
    case IPA_JF_PASS_THROUGH:
      jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
      jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
      jump_func->value.pass_through.operation
	= (enum tree_code) streamer_read_uhwi (ib);
      break;
    case IPA_JF_ANCESTOR:
      jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
      jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
      jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      jump_func->value.member_cst.pfn = stream_read_tree (ib, data_in);
      jump_func->value.member_cst.delta = stream_read_tree (ib, data_in);
      break;
    }
}
2861
2862 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2863 relevant to indirect inlining to OB. */
2864
static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  /* The layout written here must be kept in sync with
     ipa_read_indirect_edge_info.  */
  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->anc_offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  streamer_write_bitpack (&bp);

  /* OTR fields are only present for polymorphic (virtual) calls.  */
  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
    }
}
2884
2885 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2886 relevant to indirect inlining from IB. */
2887
2888 static void
2889 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2890 struct data_in *data_in ATTRIBUTE_UNUSED,
2891 struct cgraph_edge *cs)
2892 {
2893 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2894 struct bitpack_d bp;
2895
2896 ii->param_index = (int) streamer_read_hwi (ib);
2897 ii->anc_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
2898 bp = streamer_read_bitpack (ib);
2899 ii->polymorphic = bp_unpack_value (&bp, 1);
2900 if (ii->polymorphic)
2901 {
2902 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
2903 ii->otr_type = stream_read_tree (ib, data_in);
2904 }
2905 }
2906
2907 /* Stream out NODE info to OB. */
2908
static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_cgraph_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->cgraph_node_encoder;
  node_ref = lto_cgraph_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* The layout written here must be kept in sync with
     ipa_read_node_info.  */
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  /* One "used" bit per formal parameter.  */
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  /* Jump functions of direct call sites...  */
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  /* ...then of indirect ones, each followed by its indirect-call info.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}
2949
2950 /* Stream in NODE info from IB. */
2951
static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_initialize_node_params (node);

  /* The layout read here must be kept in sync with ipa_write_node_info.  */
  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  /* One "used" bit per formal parameter.  */
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
	continue;
      VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
				 count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
				    data_in);
	}
      /* Indirect-call info is streamed even when there are no args.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
2997
2998 /* Write jump functions for nodes in SET. */
2999
void
ipa_prop_write_jump_functions (cgraph_node_set set)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  /* Nothing to stream if the analysis never ran.  */
  if (!ipa_node_params_vector)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  ob->cgraph_node = NULL;
  /* First pass: count the nodes that will actually be written, so the
     reader knows how many records to expect.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
3035
3036 /* Read section in file FILE_DATA of length LEN with data DATA. */
3037
static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* The section is laid out as header | cfg | main stream | strings.  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  /* COUNT was written by ipa_prop_write_jump_functions.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_cgraph_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->cgraph_node_encoder;
      node = lto_cgraph_encoder_deref (encoder, index);
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
3076
3077 /* Read ipcp jump functions. */
3078
3079 void
3080 ipa_prop_read_jump_functions (void)
3081 {
3082 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3083 struct lto_file_decl_data *file_data;
3084 unsigned int j = 0;
3085
3086 ipa_check_create_node_params ();
3087 ipa_check_create_edge_args ();
3088 ipa_register_cgraph_hooks ();
3089
3090 while ((file_data = file_data_vec[j++]))
3091 {
3092 size_t len;
3093 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3094
3095 if (data)
3096 ipa_prop_read_section (file_data, data, len);
3097 }
3098 }
3099
3100 /* After merging units, we can get mismatch in argument counts.
3101 Also decl merging might've rendered parameter lists obsolete.
3102 Also compute called_with_variable_arg info. */
3103
3104 void
3105 ipa_update_after_lto_read (void)
3106 {
3107 struct cgraph_node *node;
3108
3109 ipa_check_create_node_params ();
3110 ipa_check_create_edge_args ();
3111
3112 FOR_EACH_DEFINED_FUNCTION (node)
3113 if (node->analyzed)
3114 ipa_initialize_node_params (node);
3115 }