1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "ipa-inline.h"
34 #include "gimple.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "lto-streamer.h"
39 #include "data-streamer.h"
40 #include "tree-streamer.h"
41 #include "params.h"
42
43
44 /* Intermediate information about a parameter that is only useful during the
45 run of ipa_analyze_node and is not kept afterwards. */
46
47 struct param_analysis_info
48 {
49 bool parm_modified, ref_modified, pt_modified;
50 bitmap parm_visited_statements, pt_visited_statements;
51 };
52
53 /* Vector where the parameter infos are actually stored. */
54 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
55 /* Vector where the edge argument infos are actually stored. */
56 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57
58 /* Holders of ipa cgraph hooks: */
59 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
60 static struct cgraph_node_hook_list *node_removal_hook_holder;
61 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
62 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
63 static struct cgraph_node_hook_list *function_insertion_hook_holder;
64
65 /* Return index of the formal whose tree is PTREE in function which corresponds
66 to INFO. */
67
68 int
69 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
70 {
71 int i, count;
72
73 count = ipa_get_param_count (info);
74 for (i = 0; i < count; i++)
75 if (ipa_get_param (info, i) == ptree)
76 return i;
77
78 return -1;
79 }
80
81 /* Populate the param_decl field in parameter descriptors of INFO that
82 corresponds to NODE. */
83
84 static void
85 ipa_populate_param_decls (struct cgraph_node *node,
86 struct ipa_node_params *info)
87 {
88 tree fndecl;
89 tree fnargs;
90 tree parm;
91 int param_num;
92
93 fndecl = node->symbol.decl;
94 fnargs = DECL_ARGUMENTS (fndecl);
95 param_num = 0;
96 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
97 {
98 VEC_index (ipa_param_descriptor_t,
99 info->descriptors, param_num).decl = parm;
100 param_num++;
101 }
102 }
103
104 /* Return how many formal parameters FNDECL has. */
105
106 static inline int
107 count_formal_params (tree fndecl)
108 {
109 tree parm;
110 int count = 0;
111
112 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
113 count++;
114
115 return count;
116 }
117
118 /* Initialize the ipa_node_params structure associated with NODE by counting
119 the function parameters, creating the descriptors and populating their
120 param_decls. */
121
122 void
123 ipa_initialize_node_params (struct cgraph_node *node)
124 {
125 struct ipa_node_params *info = IPA_NODE_REF (node);
126
127 if (!info->descriptors)
128 {
129 int param_count;
130
131 param_count = count_formal_params (node->symbol.decl);
132 if (param_count)
133 {
134 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
135 info->descriptors, param_count);
136 ipa_populate_param_decls (node, info);
137 }
138 }
139 }
140
141 /* Print the jump functions associated with call graph edge CS to file F. */
142
143 static void
144 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
145 {
146 int i, count;
147
148 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
149 for (i = 0; i < count; i++)
150 {
151 struct ipa_jump_func *jump_func;
152 enum jump_func_type type;
153
154 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
155 type = jump_func->type;
156
157 fprintf (f, " param %d: ", i);
158 if (type == IPA_JF_UNKNOWN)
159 fprintf (f, "UNKNOWN\n");
160 else if (type == IPA_JF_KNOWN_TYPE)
161 {
162 fprintf (f, "KNOWN TYPE: base ");
163 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
164 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
165 jump_func->value.known_type.offset);
166 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
167 fprintf (f, "\n");
168 }
169 else if (type == IPA_JF_CONST)
170 {
171 tree val = jump_func->value.constant;
172 fprintf (f, "CONST: ");
173 print_generic_expr (f, val, 0);
174 if (TREE_CODE (val) == ADDR_EXPR
175 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
176 {
177 fprintf (f, " -> ");
178 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
179 0);
180 }
181 fprintf (f, "\n");
182 }
183 else if (type == IPA_JF_PASS_THROUGH)
184 {
185 fprintf (f, "PASS THROUGH: ");
186 fprintf (f, "%d, op %s",
187 jump_func->value.pass_through.formal_id,
188 tree_code_name[(int)
189 jump_func->value.pass_through.operation]);
190 if (jump_func->value.pass_through.operation != NOP_EXPR)
191 {
192 fprintf (f, " ");
193 print_generic_expr (f,
194 jump_func->value.pass_through.operand, 0);
195 }
196 if (jump_func->value.pass_through.agg_preserved)
197 fprintf (f, ", agg_preserved");
198 fprintf (f, "\n");
199 }
200 else if (type == IPA_JF_ANCESTOR)
201 {
202 fprintf (f, "ANCESTOR: ");
203 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
204 jump_func->value.ancestor.formal_id,
205 jump_func->value.ancestor.offset);
206 print_generic_expr (f, jump_func->value.ancestor.type, 0);
207 if (jump_func->value.ancestor.agg_preserved)
208 fprintf (f, ", agg_preserved");
209 fprintf (f, "\n");
210 }
211
212 if (jump_func->agg.items)
213 {
214 struct ipa_agg_jf_item *item;
215 int j;
216
217 fprintf (f, " Aggregate passed by %s:\n",
218 jump_func->agg.by_ref ? "reference" : "value");
219 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
220 j, item)
221 {
222 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
223 item->offset);
224 if (TYPE_P (item->value))
225 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
226 tree_low_cst (TYPE_SIZE (item->value), 1));
227 else
228 {
229 fprintf (f, "cst: ");
230 print_generic_expr (f, item->value, 0);
231 }
232 fprintf (f, "\n");
233 }
234 }
235 }
236 }
237
238
239 /* Print the jump functions of all arguments on all call graph edges going from
240 NODE to file F. */
241
242 void
243 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
244 {
245 struct cgraph_edge *cs;
246 int i;
247
248 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
249 for (cs = node->callees; cs; cs = cs->next_callee)
250 {
251 if (!ipa_edge_args_info_available_for_edge_p (cs))
252 continue;
253
254 fprintf (f, " callsite %s/%i -> %s/%i : \n",
255 xstrdup (cgraph_node_name (node)), node->uid,
256 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
257 ipa_print_node_jump_functions_for_edge (f, cs);
258 }
259
260 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
261 {
262 if (!ipa_edge_args_info_available_for_edge_p (cs))
263 continue;
264
265 if (cs->call_stmt)
266 {
267 fprintf (f, " indirect callsite %d for stmt ", i);
268 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
269 }
270 else
271 fprintf (f, " indirect callsite %d :\n", i);
272 ipa_print_node_jump_functions_for_edge (f, cs);
273
274 }
275 }
276
277 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
278
279 void
280 ipa_print_all_jump_functions (FILE *f)
281 {
282 struct cgraph_node *node;
283
284 fprintf (f, "\nJump functions:\n");
285 FOR_EACH_FUNCTION (node)
286 {
287 ipa_print_node_jump_functions (f, node);
288 }
289 }
290
291 /* Worker for prune_expression_for_jf. */
292
293 static tree
294 prune_expression_for_jf_1 (tree *tp, int *walk_subtrees, void *)
295 {
296 if (EXPR_P (*tp))
297 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
298 else
299 *walk_subtrees = 0;
300 return NULL_TREE;
301 }
302
303 /* Return the expression tree EXP unshared and with location stripped off. */
304
305 static tree
306 prune_expression_for_jf (tree exp)
307 {
308 if (EXPR_P (exp))
309 {
310 exp = unshare_expr (exp);
311 walk_tree (&exp, prune_expression_for_jf_1, NULL, NULL);
312 }
313 return exp;
314 }
315
316 /* Set JFUNC to be a known type jump function. */
317
318 static void
319 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
320 tree base_type, tree component_type)
321 {
322 jfunc->type = IPA_JF_KNOWN_TYPE;
323 jfunc->value.known_type.offset = offset;
324 jfunc->value.known_type.base_type = base_type;
325 jfunc->value.known_type.component_type = component_type;
326 }
327
328 /* Set JFUNC to be a constant jump function. */
329
330 static void
331 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
332 {
336 jfunc->type = IPA_JF_CONST;
337 jfunc->value.constant = prune_expression_for_jf (constant);
338 }
339
340 /* Set JFUNC to be a simple pass-through jump function. */
341 static void
342 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
343 bool agg_preserved)
344 {
345 jfunc->type = IPA_JF_PASS_THROUGH;
346 jfunc->value.pass_through.operand = NULL_TREE;
347 jfunc->value.pass_through.formal_id = formal_id;
348 jfunc->value.pass_through.operation = NOP_EXPR;
349 jfunc->value.pass_through.agg_preserved = agg_preserved;
350 }
351
352 /* Set JFUNC to be an arithmetic pass-through jump function. */
353
354 static void
355 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
356 tree operand, enum tree_code operation)
357 {
358 jfunc->type = IPA_JF_PASS_THROUGH;
359 jfunc->value.pass_through.operand = prune_expression_for_jf (operand);
360 jfunc->value.pass_through.formal_id = formal_id;
361 jfunc->value.pass_through.operation = operation;
362 jfunc->value.pass_through.agg_preserved = false;
363 }
364
365 /* Set JFUNC to be an ancestor jump function. */
366
367 static void
368 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
369 tree type, int formal_id, bool agg_preserved)
370 {
371 jfunc->type = IPA_JF_ANCESTOR;
372 jfunc->value.ancestor.formal_id = formal_id;
373 jfunc->value.ancestor.offset = offset;
374 jfunc->value.ancestor.type = type;
375 jfunc->value.ancestor.agg_preserved = agg_preserved;
376 }
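
/* As a hypothetical illustration of what the setters above describe (made-up
   code, not from any testcase), given formal parameters a and b and a call

     callee (7, a, a + 4, &b->some_base);

   the four arguments would be described, respectively, by a constant jump
   function (7), a simple pass-through of formal 0, an arithmetic pass-through
   of formal 0 with PLUS_EXPR and operand 4, and an ancestor jump function of
   formal 1 with the offset of the some_base field. */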
377
378 /* Structure to be passed in between detect_type_change and
379 check_stmt_for_type_change. */
380
381 struct type_change_info
382 {
383 /* Offset into the object at which the virtual method pointer we are
384 looking for resides. */
385 HOST_WIDE_INT offset;
386 /* The declaration or SSA_NAME pointer of the base that we are checking for
387 type change. */
388 tree object;
389 /* If we actually can tell the type that the object has changed to, it is
390 stored in this field. Otherwise it remains NULL_TREE. */
391 tree known_current_type;
392 /* Set to true if dynamic type change has been detected. */
393 bool type_maybe_changed;
394 /* Set to true if multiple types have been encountered. known_current_type
395 must be disregarded in that case. */
396 bool multiple_types_encountered;
397 };
398
399 /* Return true if STMT can modify a virtual method table pointer.
400
401 This function makes special assumptions about both constructors and
402 destructors which are all the functions that are allowed to alter the VMT
403 pointers. It assumes that destructors begin with assignment into all VMT
404 pointers and that constructors essentially look in the following way:
405
406 1) The very first thing they do is that they call constructors of ancestor
407 sub-objects that have them.
408
409 2) Then VMT pointers of this and all its ancestors are set to new values
410 corresponding to the type of the constructor.
411
412 3) Only afterwards, other stuff such as constructors of member sub-objects
413 and the code written by the user is run. Only this may include calling
414 virtual functions, directly or indirectly.
415
416 There is no way to call a constructor of an ancestor sub-object in any
417 other way.
418
419 This means that we do not have to care whether constructors get the correct
420 type information because they will always change it (in fact, if we define
421 the type to be given by the VMT pointer, it is undefined).
422
423 The most important fact to derive from the above is that if, for some
424 statement in section 3, we try to detect whether the dynamic type has
425 changed, we can safely ignore all calls as we examine the function body
426 backwards until we reach statements in section 2 because these calls cannot
427 be ancestor constructors or destructors (if the input is not bogus) and so
428 do not change the dynamic type (this holds true only for automatically
429 allocated objects but at the moment we devirtualize only these). We then
430 must detect that statements in section 2 change the dynamic type and can try
431 to derive the new type. That is enough and we can stop; we will never see
432 the calls into constructors of sub-objects in this code. Therefore we can
433 safely ignore all call statements that we traverse.
434 */
435
436 static bool
437 stmt_may_be_vtbl_ptr_store (gimple stmt)
438 {
439 if (is_gimple_call (stmt))
440 return false;
441 else if (is_gimple_assign (stmt))
442 {
443 tree lhs = gimple_assign_lhs (stmt);
444
445 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
446 {
447 if (flag_strict_aliasing
448 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
449 return false;
450
451 if (TREE_CODE (lhs) == COMPONENT_REF
452 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
453 return false;
454 /* In the future we might want to use get_base_ref_and_offset to find
455 if there is a field corresponding to the offset and if so, proceed
456 almost like if it was a component ref. */
457 }
458 }
459 return true;
460 }
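
/* As a hypothetical example (the exact dump syntax may vary), a constructor
   of a class A typically contains a store of the form

     this_1(D)->_vptr.A = &_ZTV1A[2];

   which the predicate above must conservatively treat as a possible vtable
   pointer store, whereas under -fstrict-aliasing a store to an ordinary
   non-pointer scalar field such as

     this_1(D)->count = 0;

   is known not to be one. */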
461
462 /* If STMT can be proved to be an assignment to the virtual method table
463 pointer of the object described by TCI and the type associated with the
464 new table identified, return the type. Otherwise return NULL_TREE. */
465
466 static tree
467 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
468 {
469 HOST_WIDE_INT offset, size, max_size;
470 tree lhs, rhs, base;
471
472 if (!gimple_assign_single_p (stmt))
473 return NULL_TREE;
474
475 lhs = gimple_assign_lhs (stmt);
476 rhs = gimple_assign_rhs1 (stmt);
477 if (TREE_CODE (lhs) != COMPONENT_REF
478 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
479 || TREE_CODE (rhs) != ADDR_EXPR)
480 return NULL_TREE;
481 rhs = get_base_address (TREE_OPERAND (rhs, 0));
482 if (!rhs
483 || TREE_CODE (rhs) != VAR_DECL
484 || !DECL_VIRTUAL_P (rhs))
485 return NULL_TREE;
486
487 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
488 if (offset != tci->offset
489 || size != POINTER_SIZE
490 || max_size != POINTER_SIZE)
491 return NULL_TREE;
492 if (TREE_CODE (base) == MEM_REF)
493 {
494 if (TREE_CODE (tci->object) != MEM_REF
495 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
496 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
497 TREE_OPERAND (base, 1)))
498 return NULL_TREE;
499 }
500 else if (tci->object != base)
501 return NULL_TREE;
502
503 return DECL_CONTEXT (rhs);
504 }
505
506 /* Callback of walk_aliased_vdefs and a helper function for
507 detect_type_change to check whether a particular statement may modify
508 the virtual table pointer, and if possible also determine the new type of
509 the (sub-)object. It stores its result into DATA, which points to a
510 type_change_info structure. */
511
512 static bool
513 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
514 {
515 gimple stmt = SSA_NAME_DEF_STMT (vdef);
516 struct type_change_info *tci = (struct type_change_info *) data;
517
518 if (stmt_may_be_vtbl_ptr_store (stmt))
519 {
520 tree type;
521 type = extr_type_from_vtbl_ptr_store (stmt, tci);
522 if (tci->type_maybe_changed
523 && type != tci->known_current_type)
524 tci->multiple_types_encountered = true;
525 tci->known_current_type = type;
526 tci->type_maybe_changed = true;
527 return true;
528 }
529 else
530 return false;
531 }
532
533
534
535 /* Like detect_type_change but with extra argument COMP_TYPE which will become
536 the component type part of the new JFUNC if a dynamic type change is
537 detected and the new base type is identified. */
538
539 static bool
540 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
541 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
542 {
543 struct type_change_info tci;
544 ao_ref ao;
545
546 gcc_checking_assert (DECL_P (arg)
547 || TREE_CODE (arg) == MEM_REF
548 || handled_component_p (arg));
549 /* Const calls cannot call virtual methods through VMT and so type changes do
550 not matter. */
551 if (!flag_devirtualize || !gimple_vuse (call))
552 return false;
553
554 ao_ref_init (&ao, arg);
555 ao.base = base;
556 ao.offset = offset;
557 ao.size = POINTER_SIZE;
558 ao.max_size = ao.size;
559
560 tci.offset = offset;
561 tci.object = get_base_address (arg);
562 tci.known_current_type = NULL_TREE;
563 tci.type_maybe_changed = false;
564 tci.multiple_types_encountered = false;
565
566 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
567 &tci, NULL);
568 if (!tci.type_maybe_changed)
569 return false;
570
571 if (!tci.known_current_type
572 || tci.multiple_types_encountered
573 || offset != 0)
574 jfunc->type = IPA_JF_UNKNOWN;
575 else
576 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
577
578 return true;
579 }
580
581 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
582 looking for assignments to its virtual table pointer. If it has, return true
583 and fill in the jump function JFUNC with relevant type information or set it
584 to unknown. ARG is the object itself (not a pointer to it, unless
585 dereferenced). BASE is the base of the memory access as returned by
586 get_ref_base_and_extent, as is the offset. */
587
588 static bool
589 detect_type_change (tree arg, tree base, gimple call,
590 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
591 {
592 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
593 }
594
595 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
596 SSA name (its dereference will become the base and the offset is assumed to
597 be zero). */
598
599 static bool
600 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
601 {
602 tree comp_type;
603
604 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
605 if (!flag_devirtualize
606 || !POINTER_TYPE_P (TREE_TYPE (arg))
607 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
608 return false;
609
610 comp_type = TREE_TYPE (TREE_TYPE (arg));
611 arg = build2 (MEM_REF, ptr_type_node, arg,
612 build_int_cst (ptr_type_node, 0));
613
614 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
615 }
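
/* A hypothetical example of the dynamic type change the functions above
   detect (simplified, not a real testcase): placement new can re-construct an
   object in place and rewrite its vtable pointer:

     void f (A *a)
     {
       a->~A ();
       new (a) B;
       a->foo ();
     }

   Walking the virtual SSA definitions backwards from the call to foo finds
   the intervening vtable pointer store, so the call is not devirtualized
   using the stale type A. */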
616
617 /* Callback of walk_aliased_vdefs. Sets the boolean variable pointed to by
618 DATA to true to flag that it has been invoked. */
619
620 static bool
621 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
622 void *data)
623 {
624 bool *b = (bool *) data;
625 *b = true;
626 return true;
627 }
628
629 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
630 a value known not to be modified in this function before reaching the
631 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
632 information about the parameter. */
633
634 static bool
635 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
636 gimple stmt, tree parm_load)
637 {
638 bool modified = false;
639 bitmap *visited_stmts;
640 ao_ref refd;
641
642 if (parm_ainfo && parm_ainfo->parm_modified)
643 return false;
644
645 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
646 ao_ref_init (&refd, parm_load);
647 /* We can cache visited statements only when parm_ainfo is available and when
648 we are looking at a naked load of the whole parameter. */
649 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
650 visited_stmts = NULL;
651 else
652 visited_stmts = &parm_ainfo->parm_visited_statements;
653 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
654 visited_stmts);
655 if (parm_ainfo && modified)
656 parm_ainfo->parm_modified = true;
657 return !modified;
658 }
659
660 /* If STMT is an assignment that loads a value from a parameter declaration
661 which has not been modified, return the index of that parameter in
662 ipa_node_params. Otherwise return -1. */
663
664 static int
665 load_from_unmodified_param (struct ipa_node_params *info,
666 struct param_analysis_info *parms_ainfo,
667 gimple stmt)
668 {
669 int index;
670 tree op1;
671
672 if (!gimple_assign_single_p (stmt))
673 return -1;
674
675 op1 = gimple_assign_rhs1 (stmt);
676 if (TREE_CODE (op1) != PARM_DECL)
677 return -1;
678
679 index = ipa_get_param_decl_index (info, op1);
680 if (index < 0
681 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
682 : NULL, stmt, op1))
683 return -1;
684
685 return index;
686 }
687
688 /* Return true if memory reference REF loads data that are known to be
689 unmodified in this function before reaching statement STMT. PARM_AINFO, if
690 non-NULL, is a pointer to a structure containing temporary information about
691 the parameter. */
692
693 static bool
694 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
695 gimple stmt, tree ref)
696 {
697 bool modified = false;
698 ao_ref refd;
699
700 gcc_checking_assert (gimple_vuse (stmt));
701 if (parm_ainfo && parm_ainfo->ref_modified)
702 return false;
703
704 ao_ref_init (&refd, ref);
705 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
706 NULL);
707 if (parm_ainfo && modified)
708 parm_ainfo->ref_modified = true;
709 return !modified;
710 }
711
712 /* Return true if the data pointed to by PARM is known to be unmodified in this
713 function before reaching call statement CALL into which it is passed.
714 PARM_AINFO is a pointer to a structure containing temporary information
715 about PARM. */
716
717 static bool
718 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
719 gimple call, tree parm)
720 {
721 bool modified = false;
722 ao_ref refd;
723
724 /* It's unnecessary to calculate anything about memory contents for a const
725 function because it is not going to use it. But do not cache the result
726 either. Also, no such calculations for non-pointers. */
727 if (!gimple_vuse (call)
728 || !POINTER_TYPE_P (TREE_TYPE (parm)))
729 return false;
730
731 if (parm_ainfo->pt_modified)
732 return false;
733
734 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
735 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
736 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
737 if (modified)
738 parm_ainfo->pt_modified = true;
739 return !modified;
740 }
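
/* For instance, in the hypothetical

     void f (struct S *p)
     {
       p->x = 5;
       g (p);
     }

   the store to p->x makes the function above return false for the argument
   of the call to g, whereas without it the aggregate pointed to by p would
   be considered preserved and the agg_preserved flag could be set on the
   corresponding pass-through jump function. */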
741
742 /* Return true if we can prove that OP is a memory reference loading unmodified
743 data from an aggregate passed as a parameter and if the aggregate is passed
744 by reference, that the alias type of the load corresponds to the type of the
745 formal parameter (so that we can rely on this type for TBAA in callers).
746 INFO and PARMS_AINFO describe parameters of the current function (but the
747 latter can be NULL), STMT is the load statement. If the function returns true,
748 *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
749 within the aggregate and whether it is a load from a value passed by
750 reference respectively. */
751
752 static bool
753 ipa_load_from_parm_agg_1 (struct ipa_node_params *info,
754 struct param_analysis_info *parms_ainfo, gimple stmt,
755 tree op, int *index_p, HOST_WIDE_INT *offset_p,
756 bool *by_ref_p)
757 {
758 int index;
759 HOST_WIDE_INT size, max_size;
760 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
761
762 if (max_size == -1 || max_size != size || *offset_p < 0)
763 return false;
764
765 if (DECL_P (base))
766 {
767 int index = ipa_get_param_decl_index (info, base);
768 if (index >= 0
769 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
770 : NULL, stmt, op))
771 {
772 *index_p = index;
773 *by_ref_p = false;
774 return true;
775 }
776 return false;
777 }
778
779 if (TREE_CODE (base) != MEM_REF
780 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
781 || !integer_zerop (TREE_OPERAND (base, 1)))
782 return false;
783
784 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
785 {
786 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
787 index = ipa_get_param_decl_index (info, parm);
788 }
789 else
790 {
791 /* This branch catches situations where a pointer parameter is not a
792 gimple register, for example:
793
794 void hip7(S*) (struct S * p)
795 {
796 void (*<T2e4>) (struct S *) D.1867;
797 struct S * p.1;
798
799 <bb 2>:
800 p.1_1 = p;
801 D.1867_2 = p.1_1->f;
802 D.1867_2 ();
803 gdp = &p;
804 */
805
806 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
807 index = load_from_unmodified_param (info, parms_ainfo, def);
808 }
809
810 if (index >= 0
811 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
812 stmt, op))
813 {
814 *index_p = index;
815 *by_ref_p = true;
816 return true;
817 }
818 return false;
819 }
820
821 /* Just like the previous function, but without the param_analysis_info
822 pointer, for users outside of this file. */
823
824 bool
825 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
826 tree op, int *index_p, HOST_WIDE_INT *offset_p,
827 bool *by_ref_p)
828 {
829 return ipa_load_from_parm_agg_1 (info, NULL, stmt, op, index_p, offset_p,
830 by_ref_p);
831 }
832
833 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
834 of an assignment statement STMT, try to determine whether we are actually
835 handling any of the following cases and construct an appropriate jump
836 function into JFUNC if so:
837
838 1) The passed value is loaded from a formal parameter which is not a gimple
839 register (most probably because it is addressable; the value has to be
840 scalar) and we can guarantee the value has not changed. This case can
841 therefore be described by a simple pass-through jump function. For example:
842
843 foo (int a)
844 {
845 int a.0;
846
847 a.0_2 = a;
848 bar (a.0_2);
849
850 2) The passed value can be described by a simple arithmetic pass-through
851 jump function. E.g.
852
853 foo (int a)
854 {
855 int D.2064;
856
857 D.2064_4 = a.1(D) + 4;
858 bar (D.2064_4);
859
860 This case can also occur in combination with the previous one, e.g.:
861
862 foo (int a, int z)
863 {
864 int a.0;
865 int D.2064;
866
867 a.0_3 = a;
868 D.2064_4 = a.0_3 + 4;
869 foo (D.2064_4);
870
871 3) The passed value is an address of an object within another one (which
872 is also passed by reference). Such situations are described by an ancestor
873 jump function and describe situations such as:
874
875 B::foo() (struct B * const this)
876 {
877 struct A * D.1845;
878
879 D.1845_2 = &this_1(D)->D.1748;
880 A::bar (D.1845_2);
881
882 INFO is the structure describing individual parameters, used to access them
883 at different stages of IPA optimizations. PARMS_AINFO contains the
884 information that is only needed for intraprocedural analysis. */
885
886 static void
887 compute_complex_assign_jump_func (struct ipa_node_params *info,
888 struct param_analysis_info *parms_ainfo,
889 struct ipa_jump_func *jfunc,
890 gimple call, gimple stmt, tree name)
891 {
892 HOST_WIDE_INT offset, size, max_size;
893 tree op1, tc_ssa, base, ssa;
894 int index;
895
896 op1 = gimple_assign_rhs1 (stmt);
897
898 if (TREE_CODE (op1) == SSA_NAME)
899 {
900 if (SSA_NAME_IS_DEFAULT_DEF (op1))
901 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
902 else
903 index = load_from_unmodified_param (info, parms_ainfo,
904 SSA_NAME_DEF_STMT (op1));
905 tc_ssa = op1;
906 }
907 else
908 {
909 index = load_from_unmodified_param (info, parms_ainfo, stmt);
910 tc_ssa = gimple_assign_lhs (stmt);
911 }
912
913 if (index >= 0)
914 {
915 tree op2 = gimple_assign_rhs2 (stmt);
916
917 if (op2)
918 {
919 if (!is_gimple_ip_invariant (op2)
920 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
921 && !useless_type_conversion_p (TREE_TYPE (name),
922 TREE_TYPE (op1))))
923 return;
924
925 ipa_set_jf_arith_pass_through (jfunc, index, op2,
926 gimple_assign_rhs_code (stmt));
927 }
928 else if (gimple_assign_single_p (stmt)
929 && !detect_type_change_ssa (tc_ssa, call, jfunc))
930 {
931 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
932 call, tc_ssa);
933 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
934 }
935 return;
936 }
937
938 if (TREE_CODE (op1) != ADDR_EXPR)
939 return;
940 op1 = TREE_OPERAND (op1, 0);
941 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
942 return;
943 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
944 if (TREE_CODE (base) != MEM_REF
945 /* If this is a varying address, punt. */
946 || max_size == -1
947 || max_size != size)
948 return;
949 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
950 ssa = TREE_OPERAND (base, 0);
951 if (TREE_CODE (ssa) != SSA_NAME
952 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
953 || offset < 0)
954 return;
955
956 /* Dynamic types are changed only in constructors and destructors. */
957 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
958 if (index >= 0
959 && !detect_type_change (op1, base, call, jfunc, offset))
960 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
961 parm_ref_data_pass_through_p (&parms_ainfo[index],
962 call, ssa));
963 }
964
965 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
966 it looks like:
967
968 iftmp.1_3 = &obj_2(D)->D.1762;
969
970 The base of the MEM_REF must be a default definition SSA NAME of a
971 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
972 whole MEM_REF expression is returned and the offset calculated from any
973 handled components and the MEM_REF itself is stored into *OFFSET. The whole
974 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
975
976 static tree
977 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
978 {
979 HOST_WIDE_INT size, max_size;
980 tree expr, parm, obj;
981
982 if (!gimple_assign_single_p (assign))
983 return NULL_TREE;
984 expr = gimple_assign_rhs1 (assign);
985
986 if (TREE_CODE (expr) != ADDR_EXPR)
987 return NULL_TREE;
988 expr = TREE_OPERAND (expr, 0);
989 obj = expr;
990 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
991
992 if (TREE_CODE (expr) != MEM_REF
993 /* If this is a varying address, punt. */
994 || max_size == -1
995 || max_size != size
996 || *offset < 0)
997 return NULL_TREE;
998 parm = TREE_OPERAND (expr, 0);
999 if (TREE_CODE (parm) != SSA_NAME
1000 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1001 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1002 return NULL_TREE;
1003
1004 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1005 *obj_p = obj;
1006 return expr;
1007 }
1008
1009
1010 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1011 statement PHI, try to find out whether it is in fact a
1012 multiple-inheritance typecast from a descendant into an ancestor of a formal
1013 parameter and thus can be described by an ancestor jump function and if so,
1014 write the appropriate function into JFUNC.
1015
1016 Essentially we want to match the following pattern:
1017
1018 if (obj_2(D) != 0B)
1019 goto <bb 3>;
1020 else
1021 goto <bb 4>;
1022
1023 <bb 3>:
1024 iftmp.1_3 = &obj_2(D)->D.1762;
1025
1026 <bb 4>:
1027 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1028 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1029 return D.1879_6; */
1030
1031 static void
1032 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1033 struct param_analysis_info *parms_ainfo,
1034 struct ipa_jump_func *jfunc,
1035 gimple call, gimple phi)
1036 {
1037 HOST_WIDE_INT offset;
1038 gimple assign, cond;
1039 basic_block phi_bb, assign_bb, cond_bb;
1040 tree tmp, parm, expr, obj;
1041 int index, i;
1042
1043 if (gimple_phi_num_args (phi) != 2)
1044 return;
1045
1046 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1047 tmp = PHI_ARG_DEF (phi, 0);
1048 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1049 tmp = PHI_ARG_DEF (phi, 1);
1050 else
1051 return;
1052 if (TREE_CODE (tmp) != SSA_NAME
1053 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1054 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1055 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1056 return;
1057
1058 assign = SSA_NAME_DEF_STMT (tmp);
1059 assign_bb = gimple_bb (assign);
1060 if (!single_pred_p (assign_bb))
1061 return;
1062 expr = get_ancestor_addr_info (assign, &obj, &offset);
1063 if (!expr)
1064 return;
1065 parm = TREE_OPERAND (expr, 0);
1066 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1067 gcc_assert (index >= 0);
1068
1069 cond_bb = single_pred (assign_bb);
1070 cond = last_stmt (cond_bb);
1071 if (!cond
1072 || gimple_code (cond) != GIMPLE_COND
1073 || gimple_cond_code (cond) != NE_EXPR
1074 || gimple_cond_lhs (cond) != parm
1075 || !integer_zerop (gimple_cond_rhs (cond)))
1076 return;
1077
1078 phi_bb = gimple_bb (phi);
1079 for (i = 0; i < 2; i++)
1080 {
1081 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1082 if (pred != assign_bb && pred != cond_bb)
1083 return;
1084 }
1085
1086 if (!detect_type_change (obj, expr, call, jfunc, offset))
1087 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1088 parm_ref_data_pass_through_p (&parms_ainfo[index],
1089 call, parm));
1090 }
1091
1092 /* Given OP which is passed as an actual argument to a called function,
1093 determine if it is possible to construct a KNOWN_TYPE jump function for it
1094 and if so, create one and store it to JFUNC. */
1095
1096 static void
1097 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1098 gimple call)
1099 {
1100 HOST_WIDE_INT offset, size, max_size;
1101 tree base;
1102
1103 if (!flag_devirtualize
1104 || TREE_CODE (op) != ADDR_EXPR
1105 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1106 return;
1107
1108 op = TREE_OPERAND (op, 0);
1109 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1110 if (!DECL_P (base)
1111 || max_size == -1
1112 || max_size != size
1113 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1114 || is_global_var (base))
1115 return;
1116
1117 if (!TYPE_BINFO (TREE_TYPE (base))
1118 || detect_type_change (op, base, call, jfunc, offset))
1119 return;
1120
1121 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1122 }
1123
1124 /* Inspect the given TYPE and return true iff it has the same structure (the
1125 same number of fields of the same types) as a C++ member pointer. If
1126 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1127 corresponding fields there. */
1128
1129 static bool
1130 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1131 {
1132 tree fld;
1133
1134 if (TREE_CODE (type) != RECORD_TYPE)
1135 return false;
1136
1137 fld = TYPE_FIELDS (type);
1138 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1139 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1140 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1141 return false;
1142
1143 if (method_ptr)
1144 *method_ptr = fld;
1145
1146 fld = DECL_CHAIN (fld);
1147 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1148 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1149 return false;
1150 if (delta)
1151 *delta = fld;
1152
1153 if (DECL_CHAIN (fld))
1154 return false;
1155
1156 return true;
1157 }
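
/* For reference, the record matched above corresponds to the common C++ ABI
   representation of a pointer to member function, roughly (the field names
   are illustrative):

     struct ptrmemfunc
     {
       void (*__pfn) ();
       ptrdiff_t __delta;
     };

   where __pfn holds the method pointer (or, when its least significant bit
   is set, a vtable offset for virtual methods) and __delta the adjustment to
   apply to the this pointer. */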
1158
1159 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1160 return the rhs of its defining statement. Otherwise return RHS as it
1161 is. */
1162
1163 static inline tree
1164 get_ssa_def_if_simple_copy (tree rhs)
1165 {
1166 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1167 {
1168 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1169
1170 if (gimple_assign_single_p (def_stmt))
1171 rhs = gimple_assign_rhs1 (def_stmt);
1172 else
1173 break;
1174 }
1175 return rhs;
1176 }
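
/* For example (hypothetical SSA names), given the copy chain

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) returns a_1, assuming a_1 is a default
   definition (e.g. of a parameter) at which the walk stops. */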
1177
1178 /* Simple linked list, describing known contents of an aggregate before
1179 the call. */
1180
1181 struct ipa_known_agg_contents_list
1182 {
1183 /* Offset and size of the described part of the aggregate. */
1184 HOST_WIDE_INT offset, size;
1185 /* Known constant value or NULL if the contents are known to be unknown. */
1186 tree constant;
1187 /* Pointer to the next structure in the list. */
1188 struct ipa_known_agg_contents_list *next;
1189 };
1190
1191 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1192 in ARG is filled in with constant values. ARG can either be an aggregate
1193 expression or a pointer to an aggregate. JFUNC is the jump function into
1194 which the constants are subsequently stored. */
1195
1196 static void
1197 determine_known_aggregate_parts (gimple call, tree arg,
1198 struct ipa_jump_func *jfunc)
1199 {
1200 struct ipa_known_agg_contents_list *list = NULL;
1201 int item_count = 0, const_count = 0;
1202 HOST_WIDE_INT arg_offset, arg_size;
1203 gimple_stmt_iterator gsi;
1204 tree arg_base;
1205 bool check_ref, by_ref;
1206 ao_ref r;
1207
1208 /* The function operates in three stages. First, we prepare check_ref, r,
1209 arg_base and arg_offset based on what exactly is passed as the actual
1210 argument. */
1211
1212 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1213 {
1214 by_ref = true;
1215 if (TREE_CODE (arg) == SSA_NAME)
1216 {
1217 tree type_size;
1218 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1219 return;
1220 check_ref = true;
1221 arg_base = arg;
1222 arg_offset = 0;
1223 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1224 arg_size = tree_low_cst (type_size, 1);
1225 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1226 }
1227 else if (TREE_CODE (arg) == ADDR_EXPR)
1228 {
1229 HOST_WIDE_INT arg_max_size;
1230
1231 arg = TREE_OPERAND (arg, 0);
1232 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1233 &arg_max_size);
1234 if (arg_max_size == -1
1235 || arg_max_size != arg_size
1236 || arg_offset < 0)
1237 return;
1238 if (DECL_P (arg_base))
1239 {
1240 tree size;
1241 check_ref = false;
1242 size = build_int_cst (integer_type_node, arg_size);
1243 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1244 }
1245 else
1246 return;
1247 }
1248 else
1249 return;
1250 }
1251 else
1252 {
1253 HOST_WIDE_INT arg_max_size;
1254
1255 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1256
1257 by_ref = false;
1258 check_ref = false;
1259 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1260 &arg_max_size);
1261 if (arg_max_size == -1
1262 || arg_max_size != arg_size
1263 || arg_offset < 0)
1264 return;
1265
1266 ao_ref_init (&r, arg);
1267 }
1268
1269 /* Second stage walks back through the BB, looks at individual statements and
1270 as long as it is confident of how the statements affect contents of the
1271 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1272 structures describing it. */
1273 gsi = gsi_for_stmt (call);
1274 gsi_prev (&gsi);
1275 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1276 {
1277 struct ipa_known_agg_contents_list *n, **p;
1278 gimple stmt = gsi_stmt (gsi);
1279 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1280 tree lhs, rhs, lhs_base;
1281 bool partial_overlap;
1282
1283 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1284 continue;
1285 if (!gimple_assign_single_p (stmt))
1286 break;
1287
1288 lhs = gimple_assign_lhs (stmt);
1289 rhs = gimple_assign_rhs1 (stmt);
1290 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
1291 break;
1292
1293 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1294 &lhs_max_size);
1295 if (lhs_max_size == -1
1296 || lhs_max_size != lhs_size
1297 || (lhs_offset < arg_offset
1298 && lhs_offset + lhs_size > arg_offset)
1299 || (lhs_offset < arg_offset + arg_size
1300 && lhs_offset + lhs_size > arg_offset + arg_size))
1301 break;
1302
1303 if (check_ref)
1304 {
1305 if (TREE_CODE (lhs_base) != MEM_REF
1306 || TREE_OPERAND (lhs_base, 0) != arg_base
1307 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1308 break;
1309 }
1310 else if (lhs_base != arg_base)
1311 break;
1312
1313 if (lhs_offset + lhs_size < arg_offset
1314 || lhs_offset >= (arg_offset + arg_size))
1315 continue;
1316
1317 partial_overlap = false;
1318 p = &list;
1319 while (*p && (*p)->offset < lhs_offset)
1320 {
1321 if ((*p)->offset + (*p)->size > lhs_offset)
1322 {
1323 partial_overlap = true;
1324 break;
1325 }
1326 p = &(*p)->next;
1327 }
1328 if (partial_overlap)
1329 break;
1330 if (*p && (*p)->offset < lhs_offset + lhs_size)
1331 {
1332 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1333 /* We already know this value is subsequently overwritten with
1334 something else. */
1335 continue;
1336 else
1337 /* Otherwise this is a partial overlap which we cannot
1338 represent. */
1339 break;
1340 }
1341
1342 rhs = get_ssa_def_if_simple_copy (rhs);
1343 n = XALLOCA (struct ipa_known_agg_contents_list);
1344 n->size = lhs_size;
1345 n->offset = lhs_offset;
1346 if (is_gimple_ip_invariant (rhs))
1347 {
1348 n->constant = rhs;
1349 const_count++;
1350 }
1351 else
1352 n->constant = NULL_TREE;
1353 n->next = *p;
1354 *p = n;
1355
1356 item_count++;
1357 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1358 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1359 break;
1360 }
1361
1362 /* Third stage just goes over the list and creates an appropriate vector of
1363 ipa_agg_jf_item structures out of it, of course only if there are
1364 any known constants to begin with. */
1365
1366 if (const_count)
1367 {
1368 jfunc->agg.by_ref = by_ref;
1369 jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
1370 while (list)
1371 {
1372 if (list->constant)
1373 {
1374 struct ipa_agg_jf_item item;
1375 item.offset = list->offset - arg_offset;
1376 item.value = prune_expression_for_jf (list->constant);
1377 VEC_quick_push (ipa_agg_jf_item_t, jfunc->agg.items, item);
1378 }
1379 list = list->next;
1380 }
1381 }
1382 }
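
/* A minimal hypothetical example of what the function above recognizes
   (assuming 32-bit int fields and no padding):

     struct S s;
     s.a = 1;
     s.b = 17;
     foo (&s);

   yields an aggregate jump function passed by reference with two items, one
   with offset 0 and constant 1 and one with offset 32 and constant 17,
   provided no statement in between may clobber s. */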
1383
1384 /* Compute jump functions for all arguments of callsite CS and insert the
1385 information in the jump_functions array in the ipa_edge_args corresponding
1386 to this callsite. */
1387
1388 static void
1389 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1390 struct cgraph_edge *cs)
1391 {
1392 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1393 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1394 gimple call = cs->call_stmt;
1395 int n, arg_num = gimple_call_num_args (call);
1396
1397 if (arg_num == 0 || args->jump_functions)
1398 return;
1399 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);
1400
1401 for (n = 0; n < arg_num; n++)
1402 {
1403 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1404 tree arg = gimple_call_arg (call, n);
1405
1406 if (is_gimple_ip_invariant (arg))
1407 ipa_set_jf_constant (jfunc, arg);
1408 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1409 && TREE_CODE (arg) == PARM_DECL)
1410 {
1411 int index = ipa_get_param_decl_index (info, arg);
1412
1413 gcc_assert (index >= 0);
1414 /* Aggregate passed by value; check for pass-through, otherwise we
1415 will attempt to fill in aggregate contents later in this
1416 for loop. */
1417 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1418 {
1419 ipa_set_jf_simple_pass_through (jfunc, index, false);
1420 continue;
1421 }
1422 }
1423 else if (TREE_CODE (arg) == SSA_NAME)
1424 {
1425 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1426 {
1427 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1428 if (index >= 0
1429 && !detect_type_change_ssa (arg, call, jfunc))
1430 {
1431 bool agg_p;
1432 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1433 call, arg);
1434 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1435 }
1436 }
1437 else
1438 {
1439 gimple stmt = SSA_NAME_DEF_STMT (arg);
1440 if (is_gimple_assign (stmt))
1441 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1442 call, stmt, arg);
1443 else if (gimple_code (stmt) == GIMPLE_PHI)
1444 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1445 call, stmt);
1446 }
1447 }
1448 else
1449 compute_known_type_jump_func (arg, jfunc, call);
1450
1451 if ((jfunc->type != IPA_JF_PASS_THROUGH
1452 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1453 && (jfunc->type != IPA_JF_ANCESTOR
1454 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1455 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1456 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1457 determine_known_aggregate_parts (call, arg, jfunc);
1458 }
1459 }
1460
1461 /* Compute jump functions for all edges - both direct and indirect - outgoing
1462 from NODE. Also count the actual arguments in the process. */
1463
1464 static void
1465 ipa_compute_jump_functions (struct cgraph_node *node,
1466 struct param_analysis_info *parms_ainfo)
1467 {
1468 struct cgraph_edge *cs;
1469
1470 for (cs = node->callees; cs; cs = cs->next_callee)
1471 {
1472 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1473 NULL);
1474 /* We do not need to bother analyzing calls to unknown
1475 functions unless they may become known during lto/whopr. */
1476 if (!callee->analyzed && !flag_lto)
1477 continue;
1478 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1479 }
1480
1481 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1482 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1483 }
1484
1485 /* If STMT looks like a statement loading a value from a member pointer formal
1486 parameter, return that parameter and store the offset of the field to
1487 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1488 might be clobbered). If USE_DELTA, then we look for a use of the delta
1489 field rather than the pfn. */
1490
1491 static tree
1492 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1493 HOST_WIDE_INT *offset_p)
1494 {
1495 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1496
1497 if (!gimple_assign_single_p (stmt))
1498 return NULL_TREE;
1499
1500 rhs = gimple_assign_rhs1 (stmt);
1501 if (TREE_CODE (rhs) == COMPONENT_REF)
1502 {
1503 ref_field = TREE_OPERAND (rhs, 1);
1504 rhs = TREE_OPERAND (rhs, 0);
1505 }
1506 else
1507 ref_field = NULL_TREE;
1508 if (TREE_CODE (rhs) != MEM_REF)
1509 return NULL_TREE;
1510 rec = TREE_OPERAND (rhs, 0);
1511 if (TREE_CODE (rec) != ADDR_EXPR)
1512 return NULL_TREE;
1513 rec = TREE_OPERAND (rec, 0);
1514 if (TREE_CODE (rec) != PARM_DECL
1515 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1516 return NULL_TREE;
1517 ref_offset = TREE_OPERAND (rhs, 1);
1518
1519 if (use_delta)
1520 fld = delta_field;
1521 else
1522 fld = ptr_field;
1523 if (offset_p)
1524 *offset_p = int_bit_position (fld);
1525
1526 if (ref_field)
1527 {
1528 if (integer_nonzerop (ref_offset))
1529 return NULL_TREE;
1530 return ref_field == fld ? rec : NULL_TREE;
1531 }
1532 else
1533 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1534 : NULL_TREE;
1535 }
1536
1537 /* Returns true iff T is an SSA_NAME defined by a statement. */
1538
1539 static bool
1540 ipa_is_ssa_with_stmt_def (tree t)
1541 {
1542 if (TREE_CODE (t) == SSA_NAME
1543 && !SSA_NAME_IS_DEFAULT_DEF (t))
1544 return true;
1545 else
1546 return false;
1547 }
1548
1549 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1550 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1551 indirect call graph edge. */
1552
1553 static struct cgraph_edge *
1554 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1555 {
1556 struct cgraph_edge *cs;
1557
1558 cs = cgraph_edge (node, stmt);
1559 cs->indirect_info->param_index = param_index;
1560 cs->indirect_info->offset = 0;
1561 cs->indirect_info->polymorphic = 0;
1562 cs->indirect_info->agg_contents = 0;
1563 return cs;
1564 }
1565
1566 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1567 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1568 intermediate information about each formal parameter. Currently it checks
1569 whether the call calls a pointer that is a formal parameter and if so, the
1570 parameter is marked with the called flag and an indirect call graph edge
1571 describing the call is created. This is very simple for ordinary pointers
1572 represented in SSA but not-so-nice when it comes to member pointers. The
1573 ugly part of this function does nothing more than trying to match the
1574 pattern of such a call. An example of such a pattern is the gimple dump
1575 below, the call is on the last line:
1576
1577 <bb 2>:
1578 f$__delta_5 = f.__delta;
1579 f$__pfn_24 = f.__pfn;
1580
1581 or
1582 <bb 2>:
1583 f$__delta_5 = MEM[(struct *)&f];
1584 f$__pfn_24 = MEM[(struct *)&f + 4B];
1585
1586 and a few lines below:
1587
1588 <bb 5>
1589 D.2496_3 = (int) f$__pfn_24;
1590 D.2497_4 = D.2496_3 & 1;
1591 if (D.2497_4 != 0)
1592 goto <bb 3>;
1593 else
1594 goto <bb 4>;
1595
1596 <bb 6>:
1597 D.2500_7 = (unsigned int) f$__delta_5;
1598 D.2501_8 = &S + D.2500_7;
1599 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1600 D.2503_10 = *D.2502_9;
1601 D.2504_12 = f$__pfn_24 + -1;
1602 D.2505_13 = (unsigned int) D.2504_12;
1603 D.2506_14 = D.2503_10 + D.2505_13;
1604 D.2507_15 = *D.2506_14;
1605 iftmp.11_16 = (String:: *) D.2507_15;
1606
1607 <bb 7>:
1608 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1609 D.2500_19 = (unsigned int) f$__delta_5;
1610 D.2508_20 = &S + D.2500_19;
1611 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1612
1613 Such patterns are results of simple calls to a member pointer:
1614
1615 int doprinting (int (MyString::* f)(int) const)
1616 {
1617 MyString S ("somestring");
1618
1619 return (S.*f)(4);
1620 }
1621
1622 Moreover, the function also looks for called pointers loaded from aggregates
1623 passed by value or reference. */
1624
1625 static void
1626 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1627 struct ipa_node_params *info,
1628 struct param_analysis_info *parms_ainfo,
1629 gimple call, tree target)
1630 {
1631 gimple def;
1632 tree n1, n2;
1633 gimple d1, d2;
1634 tree rec, rec2, cond;
1635 gimple branch;
1636 int index;
1637 basic_block bb, virt_bb, join;
1638 HOST_WIDE_INT offset;
1639 bool by_ref;
1640
1641 if (SSA_NAME_IS_DEFAULT_DEF (target))
1642 {
1643 tree var = SSA_NAME_VAR (target);
1644 index = ipa_get_param_decl_index (info, var);
1645 if (index >= 0)
1646 ipa_note_param_call (node, index, call);
1647 return;
1648 }
1649
1650 def = SSA_NAME_DEF_STMT (target);
1651 if (gimple_assign_single_p (def)
1652 && ipa_load_from_parm_agg_1 (info, parms_ainfo, def,
1653 gimple_assign_rhs1 (def), &index, &offset,
1654 &by_ref))
1655 {
1656 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1657 cs->indirect_info->offset = offset;
1658 cs->indirect_info->agg_contents = 1;
1659 cs->indirect_info->by_ref = by_ref;
1660 return;
1661 }
1662
1663 /* Now we need to try to match the complex pattern of calling a member
1664 pointer. */
1665 if (gimple_code (def) != GIMPLE_PHI
1666 || gimple_phi_num_args (def) != 2
1667 || !POINTER_TYPE_P (TREE_TYPE (target))
1668 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1669 return;
1670
1671 /* First, we need to check whether one of these is a load from a member
1672 pointer that is a parameter to this function. */
1673 n1 = PHI_ARG_DEF (def, 0);
1674 n2 = PHI_ARG_DEF (def, 1);
1675 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1676 return;
1677 d1 = SSA_NAME_DEF_STMT (n1);
1678 d2 = SSA_NAME_DEF_STMT (n2);
1679
1680 join = gimple_bb (def);
1681 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1682 {
1683 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1684 return;
1685
1686 bb = EDGE_PRED (join, 0)->src;
1687 virt_bb = gimple_bb (d2);
1688 }
1689 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1690 {
1691 bb = EDGE_PRED (join, 1)->src;
1692 virt_bb = gimple_bb (d1);
1693 }
1694 else
1695 return;
1696
1697 /* Second, we need to check that the basic blocks are laid out in the way
1698 corresponding to the pattern. */
1699
1700 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1701 || single_pred (virt_bb) != bb
1702 || single_succ (virt_bb) != join)
1703 return;
1704
1705 /* Third, let's see that the branching is done depending on the least
1706 significant bit of the pfn. */
1707
1708 branch = last_stmt (bb);
1709 if (!branch || gimple_code (branch) != GIMPLE_COND)
1710 return;
1711
1712 if ((gimple_cond_code (branch) != NE_EXPR
1713 && gimple_cond_code (branch) != EQ_EXPR)
1714 || !integer_zerop (gimple_cond_rhs (branch)))
1715 return;
1716
1717 cond = gimple_cond_lhs (branch);
1718 if (!ipa_is_ssa_with_stmt_def (cond))
1719 return;
1720
1721 def = SSA_NAME_DEF_STMT (cond);
1722 if (!is_gimple_assign (def)
1723 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1724 || !integer_onep (gimple_assign_rhs2 (def)))
1725 return;
1726
1727 cond = gimple_assign_rhs1 (def);
1728 if (!ipa_is_ssa_with_stmt_def (cond))
1729 return;
1730
1731 def = SSA_NAME_DEF_STMT (cond);
1732
1733 if (is_gimple_assign (def)
1734 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1735 {
1736 cond = gimple_assign_rhs1 (def);
1737 if (!ipa_is_ssa_with_stmt_def (cond))
1738 return;
1739 def = SSA_NAME_DEF_STMT (cond);
1740 }
1741
1742 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1743 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1744 == ptrmemfunc_vbit_in_delta),
1745 NULL);
1746 if (rec != rec2)
1747 return;
1748
1749 index = ipa_get_param_decl_index (info, rec);
1750 if (index >= 0
1751 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1752 {
1753 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1754 cs->indirect_info->offset = offset;
1755 cs->indirect_info->agg_contents = 1;
1756 }
1757
1758 return;
1759 }
1760
1761 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1762 object referenced in the expression is a formal parameter of the caller
1763 (described by INFO), create a call note for the statement. */
1764
1765 static void
1766 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1767 struct ipa_node_params *info, gimple call,
1768 tree target)
1769 {
1770 struct cgraph_edge *cs;
1771 struct cgraph_indirect_call_info *ii;
1772 struct ipa_jump_func jfunc;
1773 tree obj = OBJ_TYPE_REF_OBJECT (target);
1774 int index;
1775 HOST_WIDE_INT anc_offset;
1776
1777 if (!flag_devirtualize)
1778 return;
1779
1780 if (TREE_CODE (obj) != SSA_NAME)
1781 return;
1782
1783 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1784 {
1785 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1786 return;
1787
1788 anc_offset = 0;
1789 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1790 gcc_assert (index >= 0);
1791 if (detect_type_change_ssa (obj, call, &jfunc))
1792 return;
1793 }
1794 else
1795 {
1796 gimple stmt = SSA_NAME_DEF_STMT (obj);
1797 tree expr;
1798
1799 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1800 if (!expr)
1801 return;
1802 index = ipa_get_param_decl_index (info,
1803 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1804 gcc_assert (index >= 0);
1805 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1806 return;
1807 }
1808
1809 cs = ipa_note_param_call (node, index, call);
1810 ii = cs->indirect_info;
1811 ii->offset = anc_offset;
1812 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1813 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1814 ii->polymorphic = 1;
1815 }
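
/* A hypothetical example of the virtual calls handled above: for

     void f (A *a)
     {
       a->foo ();
     }

   the call target is an OBJ_TYPE_REF whose object is the default definition
   of the parameter a, so an indirect call note is created on formal 0 with a
   zero ancestor offset, the OBJ_TYPE_REF token of foo and the polymorphic
   flag set. */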
1816
1817 /* Analyze call statement CALL to determine whether and how it utilizes formal
1818 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a
1819 vector containing intermediate information about each formal parameter. */
1820
1821 static void
1822 ipa_analyze_call_uses (struct cgraph_node *node,
1823 struct ipa_node_params *info,
1824 struct param_analysis_info *parms_ainfo, gimple call)
1825 {
1826 tree target = gimple_call_fn (call);
1827
1828 if (!target)
1829 return;
1830 if (TREE_CODE (target) == SSA_NAME)
1831 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1832 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1833 ipa_analyze_virtual_call_uses (node, info, call, target);
1834 }
1835
1836
1837 /* Analyze the statement STMT with respect to formal parameters (described
1838 in INFO) of the caller given by NODE. Currently it only examines call
1839 statements. PARMS_AINFO is a pointer to a vector containing intermediate
1840 information about each formal parameter. */
1841
1842 static void
1843 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1844 struct param_analysis_info *parms_ainfo, gimple stmt)
1845 {
1846 if (is_gimple_call (stmt))
1847 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1848 }
1849
1850 /* Callback of walk_stmt_load_store_addr_ops, used below as the visit_load,
1851 visit_store and visit_addr callback. If OP is a parameter declaration,
1852 mark it as used in the info structure passed in DATA. */
1853
1854 static bool
1855 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1856 tree op, void *data)
1857 {
1858 struct ipa_node_params *info = (struct ipa_node_params *) data;
1859
1860 op = get_base_address (op);
1861 if (op
1862 && TREE_CODE (op) == PARM_DECL)
1863 {
1864 int index = ipa_get_param_decl_index (info, op);
1865 gcc_assert (index >= 0);
1866 ipa_set_param_used (info, index, true);
1867 }
1868
1869 return false;
1870 }
1871
1872 /* Scan the function body of NODE and inspect the uses of formal parameters.
1873 Store the findings in various structures of the associated ipa_node_params
1874 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1875 vector containing intermediate information about each formal parameter. */
1876
1877 static void
1878 ipa_analyze_params_uses (struct cgraph_node *node,
1879 struct param_analysis_info *parms_ainfo)
1880 {
1881 tree decl = node->symbol.decl;
1882 basic_block bb;
1883 struct function *func;
1884 gimple_stmt_iterator gsi;
1885 struct ipa_node_params *info = IPA_NODE_REF (node);
1886 int i;
1887
1888 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1889 return;
1890
1891 for (i = 0; i < ipa_get_param_count (info); i++)
1892 {
1893 tree parm = ipa_get_param (info, i);
1894 tree ddef;
1895 /* For SSA regs see if parameter is used. For non-SSA we compute
1896 the flag during modification analysis. */
1897 if (is_gimple_reg (parm)
1898 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1899 parm)) != NULL_TREE
1900 && !has_zero_uses (ddef))
1901 ipa_set_param_used (info, i, true);
1902 }
1903
1904 func = DECL_STRUCT_FUNCTION (decl);
1905 FOR_EACH_BB_FN (bb, func)
1906 {
1907 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1908 {
1909 gimple stmt = gsi_stmt (gsi);
1910
1911 if (is_gimple_debug (stmt))
1912 continue;
1913
1914 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1915 walk_stmt_load_store_addr_ops (stmt, info,
1916 visit_ref_for_mod_analysis,
1917 visit_ref_for_mod_analysis,
1918 visit_ref_for_mod_analysis);
1919 }
1920 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1921 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1922 visit_ref_for_mod_analysis,
1923 visit_ref_for_mod_analysis,
1924 visit_ref_for_mod_analysis);
1925 }
1926
1927 info->uses_analysis_done = 1;
1928 }
1929
1930 /* Initialize the array describing properties of formal parameters
1931 of NODE, analyze their uses and compute jump functions associated
1932 with actual arguments of calls from within NODE. */
1933
1934 void
1935 ipa_analyze_node (struct cgraph_node *node)
1936 {
1937 struct ipa_node_params *info;
1938 struct param_analysis_info *parms_ainfo;
1939 int i, param_count;
1940
1941 ipa_check_create_node_params ();
1942 ipa_check_create_edge_args ();
1943 info = IPA_NODE_REF (node);
1944 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1945 ipa_initialize_node_params (node);
1946
1947 param_count = ipa_get_param_count (info);
1948 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1949 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1950
1951 ipa_analyze_params_uses (node, parms_ainfo);
1952 ipa_compute_jump_functions (node, parms_ainfo);
1953
1954 for (i = 0; i < param_count; i++)
1955 {
1956 if (parms_ainfo[i].parm_visited_statements)
1957 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1958 if (parms_ainfo[i].pt_visited_statements)
1959 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1960 }
1961
1962 pop_cfun ();
1963 }
1964
1965
1966 /* Update the jump function DST when the call graph edge corresponding to SRC
1967 is being inlined, knowing that DST is of type ancestor and SRC of known
1968 type. */
1969
1970 static void
1971 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1972 struct ipa_jump_func *dst)
1973 {
1974 HOST_WIDE_INT combined_offset;
1975 tree combined_type;
1976
1977 combined_offset = ipa_get_jf_known_type_offset (src)
1978 + ipa_get_jf_ancestor_offset (dst);
1979 combined_type = ipa_get_jf_ancestor_type (dst);
1980
1981 ipa_set_jf_known_type (dst, combined_offset,
1982 ipa_get_jf_known_type_base_type (src),
1983 combined_type);
1984 }
1985
1986 /* Update the jump functions associated with call graph edge E when the call
1987 graph edge CS is being inlined, assuming that E->caller is already (possibly
1988 indirectly) inlined into CS->callee and that E has not been inlined. */
1989
1990 static void
1991 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1992 struct cgraph_edge *e)
1993 {
1994 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1995 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1996 int count = ipa_get_cs_argument_count (args);
1997 int i;
1998
1999 for (i = 0; i < count; i++)
2000 {
2001 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2002
2003 if (dst->type == IPA_JF_ANCESTOR)
2004 {
2005 struct ipa_jump_func *src;
2006 int dst_fid = dst->value.ancestor.formal_id;
2007
2008 /* Variable number of arguments can cause havoc if we try to access
2009 one that does not exist in the inlined edge. So make sure we
2010 don't. */
2011 if (dst_fid >= ipa_get_cs_argument_count (top))
2012 {
2013 dst->type = IPA_JF_UNKNOWN;
2014 continue;
2015 }
2016
2017 src = ipa_get_ith_jump_func (top, dst_fid);
2018
2019 if (src->agg.items
2020 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2021 {
2022 struct ipa_agg_jf_item *item;
2023 int j;
2024
2025 /* Currently we do not produce clobber aggregate jump functions,
2026 replace with merging when we do. */
2027 gcc_assert (!dst->agg.items);
2028
2029 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
2030 dst->agg.by_ref = src->agg.by_ref;
2031 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
2032 item->offset -= dst->value.ancestor.offset;
2033 }
2034
2035 if (src->type == IPA_JF_KNOWN_TYPE)
2036 combine_known_type_and_ancestor_jfs (src, dst);
2037 else if (src->type == IPA_JF_PASS_THROUGH
2038 && src->value.pass_through.operation == NOP_EXPR)
2039 {
2040 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2041 dst->value.ancestor.agg_preserved &=
2042 src->value.pass_through.agg_preserved;
2043 }
2044 else if (src->type == IPA_JF_ANCESTOR)
2045 {
2046 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2047 dst->value.ancestor.offset += src->value.ancestor.offset;
2048 dst->value.ancestor.agg_preserved &=
2049 src->value.ancestor.agg_preserved;
2050 }
2051 else
2052 dst->type = IPA_JF_UNKNOWN;
2053 }
2054 else if (dst->type == IPA_JF_PASS_THROUGH)
2055 {
2056 struct ipa_jump_func *src;
2057 /* We must check range due to calls with variable number of arguments
2058 and we cannot combine jump functions with operations. */
2059 if (dst->value.pass_through.operation == NOP_EXPR
2060 && (dst->value.pass_through.formal_id
2061 < ipa_get_cs_argument_count (top)))
2062 {
2063 bool agg_p;
2064 int dst_fid = dst->value.pass_through.formal_id;
2065 src = ipa_get_ith_jump_func (top, dst_fid);
2066 agg_p = dst->value.pass_through.agg_preserved;
2067
2068 dst->type = src->type;
2069 dst->value = src->value;
2070
2071 if (src->agg.items
2072 && (agg_p || !src->agg.by_ref))
2073 {
2074 /* Currently we do not produce clobber aggregate jump
2075 functions, replace with merging when we do. */
2076 gcc_assert (!dst->agg.items);
2077
2078 dst->agg.by_ref = src->agg.by_ref;
2079 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
2080 src->agg.items);
2081 }
2082
2083 if (!agg_p)
2084 {
2085 if (dst->type == IPA_JF_PASS_THROUGH)
2086 dst->value.pass_through.agg_preserved = false;
2087 else if (dst->type == IPA_JF_ANCESTOR)
2088 dst->value.ancestor.agg_preserved = false;
2089 }
2090 }
2091 else
2092 dst->type = IPA_JF_UNKNOWN;
2093 }
2094 }
2095 }
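
/* A worked example of the combinations above (illustrative only):
   suppose B has been inlined into A via CS, an edge E out of B carries
   an ancestor jump function with offset 8 based on B's formal 0, and A
   passed its own formal 2 to B unmodified (a NOP_EXPR pass-through).
   E now originates from A, so the ancestor jump function is redirected
   to formal 2 of A while keeping its offset of 8.  Had A passed a
   known-type argument instead, the two jump functions would be merged
   by combine_known_type_and_ancestor_jfs above.  */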
2096
2097 /* If TARGET is an addr_expr of a function declaration, make it the destination
2098 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2099
2100 struct cgraph_edge *
2101 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2102 {
2103 struct cgraph_node *callee;
2104 struct inline_edge_summary *es = inline_edge_summary (ie);
2105
2106 if (TREE_CODE (target) == ADDR_EXPR)
2107 target = TREE_OPERAND (target, 0);
2108 if (TREE_CODE (target) != FUNCTION_DECL)
2109 return NULL;
2110 callee = cgraph_get_node (target);
2111 if (!callee)
2112 return NULL;
2113 ipa_check_create_node_params ();
2114
2115 /* We cannot make edges to inline clones. It is a bug if someone removed
2116 the cgraph node too early. */
2117 gcc_assert (!callee->global.inlined_to);
2118
2119 cgraph_make_edge_direct (ie, callee);
2120 es = inline_edge_summary (ie);
2121 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2122 - eni_size_weights.call_cost);
2123 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2124 - eni_time_weights.call_cost);
2125 if (dump_file)
2126 {
2127 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2128 "(%s/%i -> %s/%i), for stmt ",
2129 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2130 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2131 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2132 if (ie->call_stmt)
2133 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2134 else
2135 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2136 }
2137 callee = cgraph_function_or_thunk_node (callee, NULL);
2138
2139 return ie;
2140 }
2141
2142 /* Retrieve a value from aggregate jump function AGG for the given OFFSET, or
2143 return NULL if there is none; the items are assumed to be sorted by
2144 increasing offset. BY_REF specifies whether the value has to be passed by
2145 reference or by value. */
2145
2146 tree
2147 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2148 HOST_WIDE_INT offset, bool by_ref)
2149 {
2150 struct ipa_agg_jf_item *item;
2151 int i;
2152
2153 if (by_ref != agg->by_ref)
2154 return NULL;
2155
2156 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2157 {
2158 if (item->offset == offset)
2159 {
2160 /* Currently we do not have clobber values, return NULL for them once
2161 we do. */
2162 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2163 return item->value;
2164 }
2165 else if (item->offset > offset)
2166 return NULL;
2167 }
2168 return NULL;
2169 }
2170
2171 /* Try to find a destination for indirect edge IE that corresponds to a simple
2172 call or a call of a member function pointer and where the destination is a
2173 pointer formal parameter described by jump function JFUNC. If it can be
2174 determined, return the newly direct edge, otherwise return NULL. */
2175
2176 static struct cgraph_edge *
2177 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2178 struct ipa_jump_func *jfunc)
2179 {
2180 tree target;
2181
2182 if (ie->indirect_info->agg_contents)
2183 {
2184 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2185 ie->indirect_info->offset,
2186 ie->indirect_info->by_ref);
2187 if (!target)
2188 return NULL;
2189 }
2190 else
2191 {
2192 if (jfunc->type != IPA_JF_CONST)
2193 return NULL;
2194 target = ipa_get_jf_constant (jfunc);
2195 }
2196 return ipa_make_edge_direct_to_target (ie, target);
2197 }
2198
2199 /* Try to find a destination for indirect edge IE that corresponds to a
2200 virtual call based on a formal parameter which is described by jump
2201 function JFUNC and if it can be determined, make it direct and return the
2202 direct edge. Otherwise, return NULL. */
2203
2204 static struct cgraph_edge *
2205 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2206 struct ipa_jump_func *jfunc)
2207 {
2208 tree binfo, target;
2209
2210 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2211 return NULL;
2212
2213 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2214 gcc_checking_assert (binfo);
2215 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2216 + ie->indirect_info->offset,
2217 ie->indirect_info->otr_type);
2218 if (binfo)
2219 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2220 binfo);
2221 else
2222 return NULL;
2223
2224 if (target)
2225 return ipa_make_edge_direct_to_target (ie, target);
2226 else
2227 return NULL;
2228 }
2229
2230 /* Update the param called notes associated with NODE when CS is being inlined,
2231 assuming NODE is (potentially indirectly) inlined into CS->callee.
2232 Moreover, if the callee is discovered to be constant, create a new cgraph
2233 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2234 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2235
2236 static bool
2237 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2238 struct cgraph_node *node,
2239 VEC (cgraph_edge_p, heap) **new_edges)
2240 {
2241 struct ipa_edge_args *top;
2242 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2243 bool res = false;
2244
2245 ipa_check_create_edge_args ();
2246 top = IPA_EDGE_REF (cs);
2247
2248 for (ie = node->indirect_calls; ie; ie = next_ie)
2249 {
2250 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2251 struct ipa_jump_func *jfunc;
2252 int param_index;
2253
2254 next_ie = ie->next_callee;
2255
2256 if (ici->param_index == -1)
2257 continue;
2258
2259 /* We must check range due to calls with variable number of arguments: */
2260 if (ici->param_index >= ipa_get_cs_argument_count (top))
2261 {
2262 ici->param_index = -1;
2263 continue;
2264 }
2265
2266 param_index = ici->param_index;
2267 jfunc = ipa_get_ith_jump_func (top, param_index);
2268 if (jfunc->type == IPA_JF_PASS_THROUGH
2269 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2270 {
2271 if (ici->agg_contents
2272 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2273 ici->param_index = -1;
2274 else
2275 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2276 }
2277 else if (jfunc->type == IPA_JF_ANCESTOR)
2278 {
2279 if (ici->agg_contents
2280 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2281 ici->param_index = -1;
2282 else
2283 {
2284 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2285 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2286 }
2287 }
2288 else
2289 /* Either we can find a destination for this edge now or never. */
2290 ici->param_index = -1;
2291
2292 if (!flag_indirect_inlining)
2293 continue;
2294
2295 if (ici->polymorphic)
2296 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2297 else
2298 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2299
2300 if (new_direct_edge)
2301 {
2302 new_direct_edge->indirect_inlining_edge = 1;
2303 if (new_direct_edge->call_stmt)
2304 new_direct_edge->call_stmt_cannot_inline_p
2305 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2306 new_direct_edge->callee->symbol.decl);
2307 if (new_edges)
2308 {
2309 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
2310 new_direct_edge);
2311 top = IPA_EDGE_REF (cs);
2312 res = true;
2313 }
2314 }
2315 }
2316
2317 return res;
2318 }
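
/* An illustrative example: if NODE contained an indirect call through
   its formal 1, and the now-inlined edge CS passed the caller's formal
   3 in that position through a simple pass-through jump function, the
   loop above rewrites the note to refer to formal 3 of the caller.
   The target can then still be resolved once a constant reaches that
   parameter, e.g. when the caller is inlined in turn.  */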
2319
2320 /* Recursively traverse subtree of NODE (including node) made of inlined
2321 cgraph_edges when CS has been inlined and invoke
2322 update_indirect_edges_after_inlining on all nodes and
2323 update_jump_functions_after_inlining on all non-inlined edges that lead out
2324 of this subtree. Newly discovered indirect edges will be added to
2325 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2326 created. */
2327
2328 static bool
2329 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2330 struct cgraph_node *node,
2331 VEC (cgraph_edge_p, heap) **new_edges)
2332 {
2333 struct cgraph_edge *e;
2334 bool res;
2335
2336 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2337
2338 for (e = node->callees; e; e = e->next_callee)
2339 if (!e->inline_failed)
2340 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2341 else
2342 update_jump_functions_after_inlining (cs, e);
2343 for (e = node->indirect_calls; e; e = e->next_callee)
2344 update_jump_functions_after_inlining (cs, e);
2345
2346 return res;
2347 }
2348
2349 /* Update jump functions and call note functions on inlining the call site CS.
2350 CS is expected to lead to a node already cloned by
2351 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2352 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2353 created. */
2354
2355 bool
2356 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2357 VEC (cgraph_edge_p, heap) **new_edges)
2358 {
2359 bool changed;
2360 /* Do nothing if the preparation phase has not been carried out yet
2361 (i.e. during early inlining). */
2362 if (!ipa_node_params_vector)
2363 return false;
2364 gcc_assert (ipa_edge_args_vector);
2365
2366 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2367
2368 /* We do not keep jump functions of inlined edges up to date. Better to free
2369 them so we do not access them accidentally. */
2370 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2371 return changed;
2372 }
2373
2374 /* Frees all dynamically allocated structures that the argument info points
2375 to. */
2376
2377 void
2378 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2379 {
2380 if (args->jump_functions)
2381 ggc_free (args->jump_functions);
2382
2383 memset (args, 0, sizeof (*args));
2384 }
2385
2386 /* Free all ipa_edge_args structures. */
2387
2388 void
2389 ipa_free_all_edge_args (void)
2390 {
2391 int i;
2392 struct ipa_edge_args *args;
2393
2394 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2395 ipa_free_edge_args_substructures (args);
2396
2397 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2398 ipa_edge_args_vector = NULL;
2399 }
2400
2401 /* Frees all dynamically allocated structures that the param info points
2402 to. */
2403
2404 void
2405 ipa_free_node_params_substructures (struct ipa_node_params *info)
2406 {
2407 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2408 free (info->lattices);
2409 /* Lattice values and their sources are deallocated with their allocation
2410 pool. */
2411 VEC_free (tree, heap, info->known_vals);
2412 memset (info, 0, sizeof (*info));
2413 }
2414
2415 /* Free all ipa_node_params structures. */
2416
2417 void
2418 ipa_free_all_node_params (void)
2419 {
2420 int i;
2421 struct ipa_node_params *info;
2422
2423 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2424 ipa_free_node_params_substructures (info);
2425
2426 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2427 ipa_node_params_vector = NULL;
2428 }
2429
2430 /* Hook that is called by cgraph.c when an edge is removed. */
2431
2432 static void
2433 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2434 {
2435 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2436 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2437 <= (unsigned)cs->uid)
2438 return;
2439 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2440 }
2441
2442 /* Hook that is called by cgraph.c when a node is removed. */
2443
2444 static void
2445 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2446 {
2447 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2448 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2449 <= (unsigned)node->uid)
2450 return;
2451 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2452 }
2453
2454 /* Hook that is called by cgraph.c when an edge is duplicated. */
2455
2456 static void
2457 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2458 __attribute__((unused)) void *data)
2459 {
2460 struct ipa_edge_args *old_args, *new_args;
2461 unsigned int i;
2462
2463 ipa_check_create_edge_args ();
2464
2465 old_args = IPA_EDGE_REF (src);
2466 new_args = IPA_EDGE_REF (dst);
2467
2468 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2469 old_args->jump_functions);
2470
2471 for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
2472 VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
2473 = VEC_copy (ipa_agg_jf_item_t, gc,
2474 VEC_index (ipa_jump_func_t,
2475 old_args->jump_functions, i).agg.items);
2476 }
2477
2478 /* Hook that is called by cgraph.c when a node is duplicated. */
2479
2480 static void
2481 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2482 ATTRIBUTE_UNUSED void *data)
2483 {
2484 struct ipa_node_params *old_info, *new_info;
2485
2486 ipa_check_create_node_params ();
2487 old_info = IPA_NODE_REF (src);
2488 new_info = IPA_NODE_REF (dst);
2489
2490 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2491 old_info->descriptors);
2492 new_info->lattices = NULL;
2493 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2494
2495 new_info->uses_analysis_done = old_info->uses_analysis_done;
2496 new_info->node_enqueued = old_info->node_enqueued;
2497 }
2498
2499
2500 /* Analyze a function newly added to the callgraph. */
2501
2502 static void
2503 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2504 {
2505 ipa_analyze_node (node);
2506 }
2507
2508 /* Register our cgraph hooks if they are not already there. */
2509
2510 void
2511 ipa_register_cgraph_hooks (void)
2512 {
2513 if (!edge_removal_hook_holder)
2514 edge_removal_hook_holder =
2515 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2516 if (!node_removal_hook_holder)
2517 node_removal_hook_holder =
2518 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2519 if (!edge_duplication_hook_holder)
2520 edge_duplication_hook_holder =
2521 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2522 if (!node_duplication_hook_holder)
2523 node_duplication_hook_holder =
2524 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2525 function_insertion_hook_holder =
2526 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2527 }
2528
2529 /* Unregister all our cgraph hooks. */
2530
2531 static void
2532 ipa_unregister_cgraph_hooks (void)
2533 {
2534 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2535 edge_removal_hook_holder = NULL;
2536 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2537 node_removal_hook_holder = NULL;
2538 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2539 edge_duplication_hook_holder = NULL;
2540 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2541 node_duplication_hook_holder = NULL;
2542 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2543 function_insertion_hook_holder = NULL;
2544 }
2545
2546 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2547 longer needed after ipa-cp. */
2548
2549 void
2550 ipa_free_all_structures_after_ipa_cp (void)
2551 {
2552 if (!optimize)
2553 {
2554 ipa_free_all_edge_args ();
2555 ipa_free_all_node_params ();
2556 free_alloc_pool (ipcp_sources_pool);
2557 free_alloc_pool (ipcp_values_pool);
2558 ipa_unregister_cgraph_hooks ();
2559 }
2560 }
2561
2562 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2563 longer needed after indirect inlining. */
2564
2565 void
2566 ipa_free_all_structures_after_iinln (void)
2567 {
2568 ipa_free_all_edge_args ();
2569 ipa_free_all_node_params ();
2570 ipa_unregister_cgraph_hooks ();
2571 if (ipcp_sources_pool)
2572 free_alloc_pool (ipcp_sources_pool);
2573 if (ipcp_values_pool)
2574 free_alloc_pool (ipcp_values_pool);
2575 }
2576
2577 /* Print the ipa_tree_map data structure of function NODE
2578 to F. */
2579
2580 void
2581 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2582 {
2583 int i, count;
2584 tree temp;
2585 struct ipa_node_params *info;
2586
2587 if (!node->analyzed)
2588 return;
2589 info = IPA_NODE_REF (node);
2590 fprintf (f, " function %s parameter descriptors:\n",
2591 cgraph_node_name (node));
2592 count = ipa_get_param_count (info);
2593 for (i = 0; i < count; i++)
2594 {
2595 temp = ipa_get_param (info, i);
2596 if (TREE_CODE (temp) == PARM_DECL)
2597 fprintf (f, " param %d : %s", i,
2598 (DECL_NAME (temp)
2599 ? (*lang_hooks.decl_printable_name) (temp, 2)
2600 : "(unnamed)"));
2601 if (ipa_is_param_used (info, i))
2602 fprintf (f, " used");
2603 fprintf (f, "\n");
2604 }
2605 }
2606
2607 /* Print ipa_tree_map data structures of all functions in the
2608 callgraph to F. */
2609
2610 void
2611 ipa_print_all_params (FILE * f)
2612 {
2613 struct cgraph_node *node;
2614
2615 fprintf (f, "\nFunction parameters:\n");
2616 FOR_EACH_FUNCTION (node)
2617 ipa_print_node_params (f, node);
2618 }
2619
2620 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2621
2622 VEC(tree, heap) *
2623 ipa_get_vector_of_formal_parms (tree fndecl)
2624 {
2625 VEC(tree, heap) *args;
2626 int count;
2627 tree parm;
2628
2629 count = count_formal_params (fndecl);
2630 args = VEC_alloc (tree, heap, count);
2631 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2632 VEC_quick_push (tree, args, parm);
2633
2634 return args;
2635 }
2636
2637 /* Return a heap allocated vector containing types of formal parameters of
2638 function type FNTYPE. */
2639
2640 static inline VEC(tree, heap) *
2641 get_vector_of_formal_parm_types (tree fntype)
2642 {
2643 VEC(tree, heap) *types;
2644 int count = 0;
2645 tree t;
2646
2647 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2648 count++;
2649
2650 types = VEC_alloc (tree, heap, count);
2651 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2652 VEC_quick_push (tree, types, TREE_VALUE (t));
2653
2654 return types;
2655 }
2656
2657 /* Modify the function declaration FNDECL and its type according to the plan in
2658 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2659 to reflect the actual parameters being modified which are determined by the
2660 base_index field. */
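
/* For instance (an assumed example, not taken from any testcase): for

       void foo (int a, struct S *s);

   an ADJUSTMENTS vector containing a copy_param entry with base_index 0
   and a second entry with base_index 1, type int and by_ref false would
   turn the declaration into

       void foo (int a, int SYNTH.N);

   with the second formal replaced by a newly synthesized PARM_DECL (the
   exact name depends on SYNTH_PARM_PREFIX and a counter).  */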
2661
2662 void
2663 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2664 const char *synth_parm_prefix)
2665 {
2666 VEC(tree, heap) *oparms, *otypes;
2667 tree orig_type, new_type = NULL;
2668 tree old_arg_types, t, new_arg_types = NULL;
2669 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2670 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2671 tree new_reversed = NULL;
2672 bool care_for_types, last_parm_void;
2673
2674 if (!synth_parm_prefix)
2675 synth_parm_prefix = "SYNTH";
2676
2677 oparms = ipa_get_vector_of_formal_parms (fndecl);
2678 orig_type = TREE_TYPE (fndecl);
2679 old_arg_types = TYPE_ARG_TYPES (orig_type);
2680
2681 /* The following test is an ugly hack; some functions simply don't have any
2682 arguments in their type. This is probably a bug, but well... */
2683 care_for_types = (old_arg_types != NULL_TREE);
2684 if (care_for_types)
2685 {
2686 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2687 == void_type_node);
2688 otypes = get_vector_of_formal_parm_types (orig_type);
2689 if (last_parm_void)
2690 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2691 else
2692 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2693 }
2694 else
2695 {
2696 last_parm_void = false;
2697 otypes = NULL;
2698 }
2699
2700 for (i = 0; i < len; i++)
2701 {
2702 struct ipa_parm_adjustment *adj;
2703 gcc_assert (link);
2704
2705 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2706 parm = VEC_index (tree, oparms, adj->base_index);
2707 adj->base = parm;
2708
2709 if (adj->copy_param)
2710 {
2711 if (care_for_types)
2712 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2713 adj->base_index),
2714 new_arg_types);
2715 *link = parm;
2716 link = &DECL_CHAIN (parm);
2717 }
2718 else if (!adj->remove_param)
2719 {
2720 tree new_parm;
2721 tree ptype;
2722
2723 if (adj->by_ref)
2724 ptype = build_pointer_type (adj->type);
2725 else
2726 ptype = adj->type;
2727
2728 if (care_for_types)
2729 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2730
2731 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2732 ptype);
2733 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2734
2735 DECL_ARTIFICIAL (new_parm) = 1;
2736 DECL_ARG_TYPE (new_parm) = ptype;
2737 DECL_CONTEXT (new_parm) = fndecl;
2738 TREE_USED (new_parm) = 1;
2739 DECL_IGNORED_P (new_parm) = 1;
2740 layout_decl (new_parm, 0);
2741
2742 adj->base = parm;
2743 adj->reduction = new_parm;
2744
2745 *link = new_parm;
2746
2747 link = &DECL_CHAIN (new_parm);
2748 }
2749 }
2750
2751 *link = NULL_TREE;
2752
2753 if (care_for_types)
2754 {
2755 new_reversed = nreverse (new_arg_types);
2756 if (last_parm_void)
2757 {
2758 if (new_reversed)
2759 TREE_CHAIN (new_arg_types) = void_list_node;
2760 else
2761 new_reversed = void_list_node;
2762 }
2763 }
2764
2765 /* Use copy_node to preserve as much as possible from the original type
2766 (debug info, attribute lists etc.).
2767 The exception is METHOD_TYPEs, which must have a THIS argument;
2768 when we are asked to remove it, we need to build a new FUNCTION_TYPE
2769 instead. */
2770 if (TREE_CODE (orig_type) != METHOD_TYPE
2771 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
2772 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
2773 {
2774 new_type = build_distinct_type_copy (orig_type);
2775 TYPE_ARG_TYPES (new_type) = new_reversed;
2776 }
2777 else
2778 {
2779 new_type
2780 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2781 new_reversed));
2782 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2783 DECL_VINDEX (fndecl) = NULL_TREE;
2784 }
2785
2786 /* When signature changes, we need to clear builtin info. */
2787 if (DECL_BUILT_IN (fndecl))
2788 {
2789 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2790 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2791 }
2792
2793 /* This is a new type, not a copy of an old type. Need to reassociate
2794 variants. We can handle everything except the main variant lazily. */
2795 t = TYPE_MAIN_VARIANT (orig_type);
2796 if (orig_type != t)
2797 {
2798 TYPE_MAIN_VARIANT (new_type) = t;
2799 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2800 TYPE_NEXT_VARIANT (t) = new_type;
2801 }
2802 else
2803 {
2804 TYPE_MAIN_VARIANT (new_type) = new_type;
2805 TYPE_NEXT_VARIANT (new_type) = NULL;
2806 }
2807
2808 TREE_TYPE (fndecl) = new_type;
2809 DECL_VIRTUAL_P (fndecl) = 0;
2810 if (otypes)
2811 VEC_free (tree, heap, otypes);
2812 VEC_free (tree, heap, oparms);
2813 }
2814
2815 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2816 If this is a directly recursive call, CS must be NULL. Otherwise it must
2817 contain the corresponding call graph edge. */
2818
2819 void
2820 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2821 ipa_parm_adjustment_vec adjustments)
2822 {
2823 VEC(tree, heap) *vargs;
2824 VEC(tree, gc) **debug_args = NULL;
2825 gimple new_stmt;
2826 gimple_stmt_iterator gsi;
2827 tree callee_decl;
2828 int i, len;
2829
2830 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2831 vargs = VEC_alloc (tree, heap, len);
2832 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2833
2834 gsi = gsi_for_stmt (stmt);
2835 for (i = 0; i < len; i++)
2836 {
2837 struct ipa_parm_adjustment *adj;
2838
2839 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2840
2841 if (adj->copy_param)
2842 {
2843 tree arg = gimple_call_arg (stmt, adj->base_index);
2844
2845 VEC_quick_push (tree, vargs, arg);
2846 }
2847 else if (!adj->remove_param)
2848 {
2849 tree expr, base, off;
2850 location_t loc;
2851
2852 /* We create a new parameter out of the value of the old one; we can
2853 do the following kinds of transformations:
2854
2855 - A scalar passed by reference is converted to a scalar passed by
2856 value. (adj->by_ref is false and the type of the original
2857 actual argument is a pointer to a scalar).
2858
2859 - A part of an aggregate is passed instead of the whole aggregate.
2860 The part can be passed either by value or by reference, this is
2861 determined by value of adj->by_ref. Moreover, the code below
2862 handles both situations when the original aggregate is passed by
2863 value (its type is not a pointer) and when it is passed by
2864 reference (it is a pointer to an aggregate).
2865
2866 When the new argument is passed by reference (adj->by_ref is true)
2867 it must be a part of an aggregate and therefore we form it by
2868 simply taking the address of a reference inside the original
2869 aggregate. */
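
/* An assumed illustration of both cases: for a call foo (&s) where
   the new parameter replaces the whole aggregate with just s.f, the
   call becomes foo (s.f) when the field is passed by value and
   foo (&s.f) when adj->by_ref is set.  */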
2870
2871 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2872 base = gimple_call_arg (stmt, adj->base_index);
2873 loc = EXPR_LOCATION (base);
2874
2875 if (TREE_CODE (base) != ADDR_EXPR
2876 && POINTER_TYPE_P (TREE_TYPE (base)))
2877 off = build_int_cst (adj->alias_ptr_type,
2878 adj->offset / BITS_PER_UNIT);
2879 else
2880 {
2881 HOST_WIDE_INT base_offset;
2882 tree prev_base;
2883
2884 if (TREE_CODE (base) == ADDR_EXPR)
2885 base = TREE_OPERAND (base, 0);
2886 prev_base = base;
2887 base = get_addr_base_and_unit_offset (base, &base_offset);
2888 /* Aggregate arguments can have non-invariant addresses. */
2889 if (!base)
2890 {
2891 base = build_fold_addr_expr (prev_base);
2892 off = build_int_cst (adj->alias_ptr_type,
2893 adj->offset / BITS_PER_UNIT);
2894 }
2895 else if (TREE_CODE (base) == MEM_REF)
2896 {
2897 off = build_int_cst (adj->alias_ptr_type,
2898 base_offset
2899 + adj->offset / BITS_PER_UNIT);
2900 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2901 off);
2902 base = TREE_OPERAND (base, 0);
2903 }
2904 else
2905 {
2906 off = build_int_cst (adj->alias_ptr_type,
2907 base_offset
2908 + adj->offset / BITS_PER_UNIT);
2909 base = build_fold_addr_expr (base);
2910 }
2911 }
2912
2913 if (!adj->by_ref)
2914 {
2915 tree type = adj->type;
2916 unsigned int align;
2917 unsigned HOST_WIDE_INT misalign;
2918
2919 get_pointer_alignment_1 (base, &align, &misalign);
2920 misalign += (tree_to_double_int (off)
2921 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2922 * BITS_PER_UNIT);
2923 misalign = misalign & (align - 1);
2924 if (misalign != 0)
2925 align = (misalign & -misalign);
2926 if (align < TYPE_ALIGN (type))
2927 type = build_aligned_type (type, align);
2928 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2929 }
2930 else
2931 {
2932 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2933 expr = build_fold_addr_expr (expr);
2934 }
2935
2936 expr = force_gimple_operand_gsi (&gsi, expr,
2937 adj->by_ref
2938 || is_gimple_reg_type (adj->type),
2939 NULL, true, GSI_SAME_STMT);
2940 VEC_quick_push (tree, vargs, expr);
2941 }
2942 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2943 {
2944 unsigned int ix;
2945 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2946 gimple def_temp;
2947
2948 arg = gimple_call_arg (stmt, adj->base_index);
2949 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2950 {
2951 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2952 continue;
2953 arg = fold_convert_loc (gimple_location (stmt),
2954 TREE_TYPE (origin), arg);
2955 }
2956 if (debug_args == NULL)
2957 debug_args = decl_debug_args_insert (callee_decl);
2958 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2959 if (ddecl == origin)
2960 {
2961 ddecl = VEC_index (tree, *debug_args, ix + 1);
2962 break;
2963 }
2964 if (ddecl == NULL)
2965 {
2966 ddecl = make_node (DEBUG_EXPR_DECL);
2967 DECL_ARTIFICIAL (ddecl) = 1;
2968 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2969 DECL_MODE (ddecl) = DECL_MODE (origin);
2970
2971 VEC_safe_push (tree, gc, *debug_args, origin);
2972 VEC_safe_push (tree, gc, *debug_args, ddecl);
2973 }
2974 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2975 stmt);
2976 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2977 }
2978 }
2979
2980 if (dump_file && (dump_flags & TDF_DETAILS))
2981 {
2982 fprintf (dump_file, "replacing stmt:");
2983 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2984 }
2985
2986 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2987 VEC_free (tree, heap, vargs);
2988 if (gimple_call_lhs (stmt))
2989 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2990
2991 gimple_set_block (new_stmt, gimple_block (stmt));
2992 if (gimple_has_location (stmt))
2993 gimple_set_location (new_stmt, gimple_location (stmt));
2994 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2995 gimple_call_copy_flags (new_stmt, stmt);
2996
2997 if (dump_file && (dump_flags & TDF_DETAILS))
2998 {
2999 fprintf (dump_file, "with stmt:");
3000 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3001 fprintf (dump_file, "\n");
3002 }
3003 gsi_replace (&gsi, new_stmt, true);
3004 if (cs)
3005 cgraph_set_call_stmt (cs, new_stmt);
3006 update_ssa (TODO_update_ssa);
3007 free_dominance_info (CDI_DOMINATORS);
3008 }
3009
3010 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3011
3012 static bool
3013 index_in_adjustments_multiple_times_p (int base_index,
3014 ipa_parm_adjustment_vec adjustments)
3015 {
3016 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3017 bool one = false;
3018
3019 for (i = 0; i < len; i++)
3020 {
3021 struct ipa_parm_adjustment *adj;
3022 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3023
3024 if (adj->base_index == base_index)
3025 {
3026 if (one)
3027 return true;
3028 else
3029 one = true;
3030 }
3031 }
3032 return false;
3033 }
3034
3035
3036 /* Return adjustments that should have the same effect on function parameters
3037 and call arguments as if they were first changed according to adjustments in
3038 INNER and then by adjustments in OUTER. */
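
/* For example (illustrative only): with three original parameters, if
   INNER is { copy 0, remove 1, copy 2 } and OUTER, operating on the
   resulting two-parameter function, is { copy 0, remove 1 }, then the
   combined vector is { copy 0, remove 2, remove 1 } in terms of the
   original indices.  */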
3039
3040 ipa_parm_adjustment_vec
3041 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3042 ipa_parm_adjustment_vec outer)
3043 {
3044 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
3045 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
3046 int removals = 0;
3047 ipa_parm_adjustment_vec adjustments, tmp;
3048
3049 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
3050 for (i = 0; i < inlen; i++)
3051 {
3052 struct ipa_parm_adjustment *n;
3053 n = &VEC_index (ipa_parm_adjustment_t, inner, i);
3054
3055 if (n->remove_param)
3056 removals++;
3057 else
3058 VEC_quick_push (ipa_parm_adjustment_t, tmp, *n);
3059 }
3060
3061 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
3062 for (i = 0; i < outlen; i++)
3063 {
3064 struct ipa_parm_adjustment r;
3065 struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
3066 outer, i);
3067 struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
3068 out->base_index);
3069
3070 memset (&r, 0, sizeof (r));
3071 gcc_assert (!in->remove_param);
3072 if (out->remove_param)
3073 {
3074 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3075 {
3076 r.remove_param = true;
3077 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3078 }
3079 continue;
3080 }
3081
3082 r.base_index = in->base_index;
3083 r.type = out->type;
3084
3085 /* FIXME: Create nonlocal value too. */
3086
3087 if (in->copy_param && out->copy_param)
3088 r.copy_param = true;
3089 else if (in->copy_param)
3090 r.offset = out->offset;
3091 else if (out->copy_param)
3092 r.offset = in->offset;
3093 else
3094 r.offset = in->offset + out->offset;
3095 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3096 }
3097
3098 for (i = 0; i < inlen; i++)
3099 {
3100 struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
3101 inner, i);
3102
3103 if (n->remove_param)
3104 VEC_quick_push (ipa_parm_adjustment_t, adjustments, *n);
3105 }
3106
3107 VEC_free (ipa_parm_adjustment_t, heap, tmp);
3108 return adjustments;
3109 }
3110
3111 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
3112 human-friendly way, assuming they are meant to be applied to FNDECL. */
3113
3114 void
3115 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3116 tree fndecl)
3117 {
3118 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3119 bool first = true;
3120 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
3121
3122 fprintf (file, "IPA param adjustments: ");
3123 for (i = 0; i < len; i++)
3124 {
3125 struct ipa_parm_adjustment *adj;
3126 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3127
3128 if (!first)
3129 fprintf (file, " ");
3130 else
3131 first = false;
3132
3133 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3134 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
3135 if (adj->base)
3136 {
3137 fprintf (file, ", base: ");
3138 print_generic_expr (file, adj->base, 0);
3139 }
3140 if (adj->reduction)
3141 {
3142 fprintf (file, ", reduction: ");
3143 print_generic_expr (file, adj->reduction, 0);
3144 }
3145 if (adj->new_ssa_base)
3146 {
3147 fprintf (file, ", new_ssa_base: ");
3148 print_generic_expr (file, adj->new_ssa_base, 0);
3149 }
3150
3151 if (adj->copy_param)
3152 fprintf (file, ", copy_param");
3153 else if (adj->remove_param)
3154 fprintf (file, ", remove_param");
3155 else
3156 fprintf (file, ", offset %li", (long) adj->offset);
3157 if (adj->by_ref)
3158 fprintf (file, ", by_ref");
3159 print_node_brief (file, ", type: ", adj->type, 0);
3160 fprintf (file, "\n");
3161 }
3162 VEC_free (tree, heap, parms);
3163 }
3164
3165 /* Stream out jump function JUMP_FUNC to OB. */
3166
3167 static void
3168 ipa_write_jump_function (struct output_block *ob,
3169 struct ipa_jump_func *jump_func)
3170 {
3171 struct ipa_agg_jf_item *item;
3172 struct bitpack_d bp;
3173 int i, count;
3174
3175 streamer_write_uhwi (ob, jump_func->type);
3176 switch (jump_func->type)
3177 {
3178 case IPA_JF_UNKNOWN:
3179 break;
3180 case IPA_JF_KNOWN_TYPE:
3181 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3182 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3183 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3184 break;
3185 case IPA_JF_CONST:
3186 gcc_assert (
3187 EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
3188 stream_write_tree (ob, jump_func->value.constant, true);
3189 break;
3190 case IPA_JF_PASS_THROUGH:
3191 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3192 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3193 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3194 bp = bitpack_create (ob->main_stream);
3195 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3196 streamer_write_bitpack (&bp);
3197 break;
3198 case IPA_JF_ANCESTOR:
3199 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3200 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3201 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3202 bp = bitpack_create (ob->main_stream);
3203 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3204 streamer_write_bitpack (&bp);
3205 break;
3206 }
3207
3208 count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
3209 streamer_write_uhwi (ob, count);
3210 if (count)
3211 {
3212 bp = bitpack_create (ob->main_stream);
3213 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3214 streamer_write_bitpack (&bp);
3215 }
3216
3217 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
3218 {
3219 streamer_write_uhwi (ob, item->offset);
3220 stream_write_tree (ob, item->value, true);
3221 }
3222 }
3223
3224 /* Read in jump function JUMP_FUNC from IB. */
3225
3226 static void
3227 ipa_read_jump_function (struct lto_input_block *ib,
3228 struct ipa_jump_func *jump_func,
3229 struct data_in *data_in)
3230 {
3231 struct bitpack_d bp;
3232 int i, count;
3233
3234 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3235 switch (jump_func->type)
3236 {
3237 case IPA_JF_UNKNOWN:
3238 break;
3239 case IPA_JF_KNOWN_TYPE:
3240 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3241 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3242 jump_func->value.known_type.component_type = stream_read_tree (ib,
3243 data_in);
3244 break;
3245 case IPA_JF_CONST:
3246 jump_func->value.constant = stream_read_tree (ib, data_in);
3247 break;
3248 case IPA_JF_PASS_THROUGH:
3249 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3250 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3251 jump_func->value.pass_through.operation
3252 = (enum tree_code) streamer_read_uhwi (ib);
3253 bp = streamer_read_bitpack (ib);
3254 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3255 break;
3256 case IPA_JF_ANCESTOR:
3257 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3258 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3259 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3260 bp = streamer_read_bitpack (ib);
3261 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3262 break;
3263 }
3264
3265 count = streamer_read_uhwi (ib);
3266 jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
3267 if (count)
3268 {
3269 bp = streamer_read_bitpack (ib);
3270 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3271 }
3272 for (i = 0; i < count; i++)
3273 {
3274 struct ipa_agg_jf_item item;
3275 item.offset = streamer_read_uhwi (ib);
3276 item.value = stream_read_tree (ib, data_in);
3277 VEC_quick_push (ipa_agg_jf_item_t, jump_func->agg.items, item);
3278 }
3279 }
3280
3281 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3282 relevant to indirect inlining to OB. */
3283
3284 static void
3285 ipa_write_indirect_edge_info (struct output_block *ob,
3286 struct cgraph_edge *cs)
3287 {
3288 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3289 struct bitpack_d bp;
3290
3291 streamer_write_hwi (ob, ii->param_index);
3292 streamer_write_hwi (ob, ii->offset);
3293 bp = bitpack_create (ob->main_stream);
3294 bp_pack_value (&bp, ii->polymorphic, 1);
3295 bp_pack_value (&bp, ii->agg_contents, 1);
3296 bp_pack_value (&bp, ii->by_ref, 1);
3297 streamer_write_bitpack (&bp);
3298
3299 if (ii->polymorphic)
3300 {
3301 streamer_write_hwi (ob, ii->otr_token);
3302 stream_write_tree (ob, ii->otr_type, true);
3303 }
3304 }
3305
3306 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3307 relevant to indirect inlining from IB. */
3308
3309 static void
3310 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3311 struct data_in *data_in ATTRIBUTE_UNUSED,
3312 struct cgraph_edge *cs)
3313 {
3314 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3315 struct bitpack_d bp;
3316
3317 ii->param_index = (int) streamer_read_hwi (ib);
3318 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3319 bp = streamer_read_bitpack (ib);
3320 ii->polymorphic = bp_unpack_value (&bp, 1);
3321 ii->agg_contents = bp_unpack_value (&bp, 1);
3322 ii->by_ref = bp_unpack_value (&bp, 1);
3323 if (ii->polymorphic)
3324 {
3325 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3326 ii->otr_type = stream_read_tree (ib, data_in);
3327 }
3328 }
3329
3330 /* Stream out NODE info to OB. */
3331
3332 static void
3333 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3334 {
3335 int node_ref;
3336 lto_symtab_encoder_t encoder;
3337 struct ipa_node_params *info = IPA_NODE_REF (node);
3338 int j;
3339 struct cgraph_edge *e;
3340 struct bitpack_d bp;
3341
3342 encoder = ob->decl_state->symtab_node_encoder;
3343 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3344 streamer_write_uhwi (ob, node_ref);
3345
3346 bp = bitpack_create (ob->main_stream);
3347 gcc_assert (info->uses_analysis_done
3348 || ipa_get_param_count (info) == 0);
3349 gcc_assert (!info->node_enqueued);
3350 gcc_assert (!info->ipcp_orig_node);
3351 for (j = 0; j < ipa_get_param_count (info); j++)
3352 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3353 streamer_write_bitpack (&bp);
3354 for (e = node->callees; e; e = e->next_callee)
3355 {
3356 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3357
3358 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3359 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3360 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3361 }
3362 for (e = node->indirect_calls; e; e = e->next_callee)
3363 {
3364 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3365
3366 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3367 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3368 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3369 ipa_write_indirect_edge_info (ob, e);
3370 }
3371 }
3372
3373 /* Stream in NODE info from IB. */
3374
3375 static void
3376 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3377 struct data_in *data_in)
3378 {
3379 struct ipa_node_params *info = IPA_NODE_REF (node);
3380 int k;
3381 struct cgraph_edge *e;
3382 struct bitpack_d bp;
3383
3384 ipa_initialize_node_params (node);
3385
3386 bp = streamer_read_bitpack (ib);
3387 if (ipa_get_param_count (info) != 0)
3388 info->uses_analysis_done = true;
3389 info->node_enqueued = false;
3390 for (k = 0; k < ipa_get_param_count (info); k++)
3391 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3392 for (e = node->callees; e; e = e->next_callee)
3393 {
3394 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3395 int count = streamer_read_uhwi (ib);
3396
3397 if (!count)
3398 continue;
3399 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
3400
3401 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3402 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3403 }
3404 for (e = node->indirect_calls; e; e = e->next_callee)
3405 {
3406 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3407 int count = streamer_read_uhwi (ib);
3408
3409 if (count)
3410 {
3411 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
3412 count);
3413 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3414 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3415 data_in);
3416 }
3417 ipa_read_indirect_edge_info (ib, data_in, e);
3418 }
3419 }
3420
3421 /* Write jump functions for all functions in the current LTO partition. */
3422
3423 void
3424 ipa_prop_write_jump_functions (void)
3425 {
3426 struct cgraph_node *node;
3427 struct output_block *ob;
3428 unsigned int count = 0;
3429 lto_symtab_encoder_iterator lsei;
3430 lto_symtab_encoder_t encoder;
3431
3432
3433 if (!ipa_node_params_vector)
3434 return;
3435
3436 ob = create_output_block (LTO_section_jump_functions);
3437 encoder = ob->decl_state->symtab_node_encoder;
3438 ob->cgraph_node = NULL;
3439 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3440 lsei_next_function_in_partition (&lsei))
3441 {
3442 node = lsei_cgraph_node (lsei);
3443 if (cgraph_function_with_gimple_body_p (node)
3444 && IPA_NODE_REF (node) != NULL)
3445 count++;
3446 }
3447
3448 streamer_write_uhwi (ob, count);
3449
3450 /* Process all of the functions. */
3451 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3452 lsei_next_function_in_partition (&lsei))
3453 {
3454 node = lsei_cgraph_node (lsei);
3455 if (cgraph_function_with_gimple_body_p (node)
3456 && IPA_NODE_REF (node) != NULL)
3457 ipa_write_node_info (ob, node);
3458 }
3459 streamer_write_char_stream (ob->main_stream, 0);
3460 produce_asm (ob, NULL);
3461 destroy_output_block (ob);
3462 }
3463
3464 /* Read section in file FILE_DATA of length LEN with data DATA. */
3465
3466 static void
3467 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3468 size_t len)
3469 {
3470 const struct lto_function_header *header =
3471 (const struct lto_function_header *) data;
3472 const int cfg_offset = sizeof (struct lto_function_header);
3473 const int main_offset = cfg_offset + header->cfg_size;
3474 const int string_offset = main_offset + header->main_size;
3475 struct data_in *data_in;
3476 struct lto_input_block ib_main;
3477 unsigned int i;
3478 unsigned int count;
3479
3480 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3481 header->main_size);
3482
3483 data_in =
3484 lto_data_in_create (file_data, (const char *) data + string_offset,
3485 header->string_size, NULL);
3486 count = streamer_read_uhwi (&ib_main);
3487
3488 for (i = 0; i < count; i++)
3489 {
3490 unsigned int index;
3491 struct cgraph_node *node;
3492 lto_symtab_encoder_t encoder;
3493
3494 index = streamer_read_uhwi (&ib_main);
3495 encoder = file_data->symtab_node_encoder;
3496 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3497 gcc_assert (node->analyzed);
3498 ipa_read_node_info (&ib_main, node, data_in);
3499 }
3500 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3501 len);
3502 lto_data_in_delete (data_in);
3503 }
3504
3505 /* Read ipcp jump functions. */
3506
3507 void
3508 ipa_prop_read_jump_functions (void)
3509 {
3510 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3511 struct lto_file_decl_data *file_data;
3512 unsigned int j = 0;
3513
3514 ipa_check_create_node_params ();
3515 ipa_check_create_edge_args ();
3516 ipa_register_cgraph_hooks ();
3517
3518 while ((file_data = file_data_vec[j++]))
3519 {
3520 size_t len;
3521 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3522
3523 if (data)
3524 ipa_prop_read_section (file_data, data, len);
3525 }
3526 }
3527
3528 /* After merging units, we can get a mismatch in argument counts.
3529 Decl merging might also have rendered parameter lists obsolete, so
3530 re-initialize the parameter information of all analyzed functions. */
3531
3532 void
3533 ipa_update_after_lto_read (void)
3534 {
3535 struct cgraph_node *node;
3536
3537 ipa_check_create_node_params ();
3538 ipa_check_create_edge_args ();
3539
3540 FOR_EACH_DEFINED_FUNCTION (node)
3541 if (node->analyzed)
3542 ipa_initialize_node_params (node);
3543 }