1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
41
42
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
45
46 struct param_analysis_info
47 {
48 bool parm_modified, ref_modified, pt_modified;
49 bitmap parm_visited_statements, pt_visited_statements;
50 };
51
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector where the edge argument infos are actually stored. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
56
57 /* Holders of ipa cgraph hooks: */
58 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
59 static struct cgraph_node_hook_list *node_removal_hook_holder;
60 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
61 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
62 static struct cgraph_node_hook_list *function_insertion_hook_holder;
63
64 /* Return the index of the formal parameter whose tree is PTREE in the
65 function which corresponds to INFO. */
66
67 int
68 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
69 {
70 int i, count;
71
72 count = ipa_get_param_count (info);
73 for (i = 0; i < count; i++)
74 if (ipa_get_param (info, i) == ptree)
75 return i;
76
77 return -1;
78 }
79
80 /* Populate the decl field in parameter descriptors of INFO, which
81 corresponds to NODE. */
82
83 static void
84 ipa_populate_param_decls (struct cgraph_node *node,
85 struct ipa_node_params *info)
86 {
87 tree fndecl;
88 tree fnargs;
89 tree parm;
90 int param_num;
91
92 fndecl = node->symbol.decl;
93 fnargs = DECL_ARGUMENTS (fndecl);
94 param_num = 0;
95 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
96 {
97 VEC_index (ipa_param_descriptor_t,
98 info->descriptors, param_num).decl = parm;
99 param_num++;
100 }
101 }
102
103 /* Return how many formal parameters FNDECL has. */
104
105 static inline int
106 count_formal_params (tree fndecl)
107 {
108 tree parm;
109 int count = 0;
110
111 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
112 count++;
113
114 return count;
115 }
116
117 /* Initialize the ipa_node_params structure associated with NODE by counting
118 the function parameters, creating the descriptors and populating their
119 decl fields. */
120
121 void
122 ipa_initialize_node_params (struct cgraph_node *node)
123 {
124 struct ipa_node_params *info = IPA_NODE_REF (node);
125
126 if (!info->descriptors)
127 {
128 int param_count;
129
130 param_count = count_formal_params (node->symbol.decl);
131 if (param_count)
132 {
133 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
134 info->descriptors, param_count);
135 ipa_populate_param_decls (node, info);
136 }
137 }
138 }
139
140 /* Print the jump functions associated with call graph edge CS to file F. */
141
142 static void
143 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
144 {
145 int i, count;
146
147 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
148 for (i = 0; i < count; i++)
149 {
150 struct ipa_jump_func *jump_func;
151 enum jump_func_type type;
152
153 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
154 type = jump_func->type;
155
156 fprintf (f, " param %d: ", i);
157 if (type == IPA_JF_UNKNOWN)
158 fprintf (f, "UNKNOWN\n");
159 else if (type == IPA_JF_KNOWN_TYPE)
160 {
161 fprintf (f, "KNOWN TYPE: base ");
162 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
163 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
164 jump_func->value.known_type.offset);
165 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
166 fprintf (f, "\n");
167 }
168 else if (type == IPA_JF_CONST)
169 {
170 tree val = jump_func->value.constant;
171 fprintf (f, "CONST: ");
172 print_generic_expr (f, val, 0);
173 if (TREE_CODE (val) == ADDR_EXPR
174 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
175 {
176 fprintf (f, " -> ");
177 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
178 0);
179 }
180 fprintf (f, "\n");
181 }
182 else if (type == IPA_JF_PASS_THROUGH)
183 {
184 fprintf (f, "PASS THROUGH: ");
185 fprintf (f, "%d, op %s",
186 jump_func->value.pass_through.formal_id,
187 tree_code_name[(int)
188 jump_func->value.pass_through.operation]);
189 if (jump_func->value.pass_through.operation != NOP_EXPR)
190 {
191 fprintf (f, " ");
192 print_generic_expr (f,
193 jump_func->value.pass_through.operand, 0);
194 }
195 if (jump_func->value.pass_through.agg_preserved)
196 fprintf (f, ", agg_preserved");
197 fprintf (f, "\n");
198 }
199 else if (type == IPA_JF_ANCESTOR)
200 {
201 fprintf (f, "ANCESTOR: ");
202 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
203 jump_func->value.ancestor.formal_id,
204 jump_func->value.ancestor.offset);
205 print_generic_expr (f, jump_func->value.ancestor.type, 0);
206 if (jump_func->value.ancestor.agg_preserved)
207 fprintf (f, ", agg_preserved");
208 fprintf (f, "\n");
209 }
210
211 if (jump_func->agg.items)
212 {
213 struct ipa_agg_jf_item *item;
214 int j;
215
216 fprintf (f, " Aggregate passed by %s:\n",
217 jump_func->agg.by_ref ? "reference" : "value");
218 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
219 j, item)
220 {
221 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
222 item->offset);
223 if (TYPE_P (item->value))
224 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
225 tree_low_cst (TYPE_SIZE (item->value), 1));
226 else
227 {
228 fprintf (f, "cst: ");
229 print_generic_expr (f, item->value, 0);
230 }
231 fprintf (f, "\n");
232 }
233 }
234 }
235 }
236
237
238 /* Print to file F the jump functions of all arguments on all call graph
239 edges going from NODE. */
240
241 void
242 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
243 {
244 struct cgraph_edge *cs;
245 int i;
246
247 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
248 for (cs = node->callees; cs; cs = cs->next_callee)
249 {
250 if (!ipa_edge_args_info_available_for_edge_p (cs))
251 continue;
252
253 fprintf (f, " callsite %s/%i -> %s/%i : \n",
254 xstrdup (cgraph_node_name (node)), node->uid,
255 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
256 ipa_print_node_jump_functions_for_edge (f, cs);
257 }
258
259 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
260 {
261 if (!ipa_edge_args_info_available_for_edge_p (cs))
262 continue;
263
264 if (cs->call_stmt)
265 {
266 fprintf (f, " indirect callsite %d for stmt ", i);
267 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
268 }
269 else
270 fprintf (f, " indirect callsite %d :\n", i);
271 ipa_print_node_jump_functions_for_edge (f, cs);
272
273 }
274 }
275
276 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
277
278 void
279 ipa_print_all_jump_functions (FILE *f)
280 {
281 struct cgraph_node *node;
282
283 fprintf (f, "\nJump functions:\n");
284 FOR_EACH_FUNCTION (node)
285 {
286 ipa_print_node_jump_functions (f, node);
287 }
288 }
289
290 /* Set JFUNC to be a known type jump function. */
291
292 static void
293 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
294 tree base_type, tree component_type)
295 {
296 jfunc->type = IPA_JF_KNOWN_TYPE;
297 jfunc->value.known_type.offset = offset;
298 jfunc->value.known_type.base_type = base_type;
299 jfunc->value.known_type.component_type = component_type;
300 }
301
302 /* Set JFUNC to be a constant jump function. */
303
304 static void
305 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
306 {
307 jfunc->type = IPA_JF_CONST;
308 jfunc->value.constant = constant;
309 }
310
311 /* Set JFUNC to be a simple pass-through jump function. */
312 static void
313 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
314 bool agg_preserved)
315 {
316 jfunc->type = IPA_JF_PASS_THROUGH;
317 jfunc->value.pass_through.operand = NULL_TREE;
318 jfunc->value.pass_through.formal_id = formal_id;
319 jfunc->value.pass_through.operation = NOP_EXPR;
320 jfunc->value.pass_through.agg_preserved = agg_preserved;
321 }
322
323 /* Set JFUNC to be an arithmetic pass through jump function. */
324
325 static void
326 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
327 tree operand, enum tree_code operation)
328 {
329 jfunc->type = IPA_JF_PASS_THROUGH;
330 jfunc->value.pass_through.operand = operand;
331 jfunc->value.pass_through.formal_id = formal_id;
332 jfunc->value.pass_through.operation = operation;
333 jfunc->value.pass_through.agg_preserved = false;
334 }
335
336 /* Set JFUNC to be an ancestor jump function. */
337
338 static void
339 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
340 tree type, int formal_id, bool agg_preserved)
341 {
342 jfunc->type = IPA_JF_ANCESTOR;
343 jfunc->value.ancestor.formal_id = formal_id;
344 jfunc->value.ancestor.offset = offset;
345 jfunc->value.ancestor.type = type;
346 jfunc->value.ancestor.agg_preserved = agg_preserved;
347 }
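/* For illustration, a hedged sketch (not from the original sources) of
   caller code and the kind of jump function the setters above would
   typically build for each actual argument; use_int, use_a and caller are
   made-up names:

     struct A { int data; };
     struct B { struct A base; };

     void use_int (int);
     void use_a (struct A *);

     void
     caller (int i, struct B *b)
     {
       use_int (7);        // IPA_JF_CONST: an interprocedural invariant
       use_int (i);        // IPA_JF_PASS_THROUGH with NOP_EXPR
       use_int (i + 4);    // IPA_JF_PASS_THROUGH with PLUS_EXPR, operand 4
       use_a (&b->base);   // IPA_JF_ANCESTOR: offset of base within B
     }
*/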
348
349 /* Structure to be passed in between detect_type_change and
350 check_stmt_for_type_change. */
351
352 struct type_change_info
353 {
354 /* Offset into the object where there is the virtual method pointer we are
355 looking for. */
356 HOST_WIDE_INT offset;
357 /* The declaration or SSA_NAME pointer of the base that we are checking for
358 type change. */
359 tree object;
360 /* If we actually can tell the type that the object has changed to, it is
361 stored in this field. Otherwise it remains NULL_TREE. */
362 tree known_current_type;
363 /* Set to true if dynamic type change has been detected. */
364 bool type_maybe_changed;
365 /* Set to true if multiple types have been encountered. known_current_type
366 must be disregarded in that case. */
367 bool multiple_types_encountered;
368 };
369
370 /* Return true if STMT can modify a virtual method table pointer.
371
372 This function makes special assumptions about both constructors and
373 destructors which are all the functions that are allowed to alter the VMT
374 pointers. It assumes that destructors begin with assignment into all VMT
375 pointers and that constructors essentially look in the following way:
376
377 1) The very first thing they do is call the constructors of those ancestor
378 sub-objects that have them.
379
380 2) Then the VMT pointers of this and all its ancestors are set to new
381 values corresponding to the type of the constructor.
382
383 3) Only afterwards is other code run, such as the constructors of member
384 sub-objects and the code written by the user. Only this code may include
385 calling virtual functions, directly or indirectly.
386
387 There is no way to call a constructor of an ancestor sub-object in any
388 other way.
389
390 This means that we do not have to care whether constructors get the correct
391 type information because they will always change it (in fact, if we define
392 the type to be given by the VMT pointer, it is undefined).
393
394 The most important fact to derive from the above is that if, for some
395 statement in the section 3, we try to detect whether the dynamic type has
396 changed, we can safely ignore all calls as we examine the function body
397 backwards until we reach statements in section 2 because these calls cannot
398 be ancestor constructors or destructors (if the input is not bogus) and so
399 do not change the dynamic type (this holds true only for automatically
400 allocated objects but at the moment we devirtualize only these). We then
401 must detect that statements in section 2 change the dynamic type and can try
402 to derive the new type. That is enough and we can stop, we will never see
403 the calls into constructors of sub-objects in this code. Therefore we can
404 safely ignore all call statements that we traverse.
405 */
406
407 static bool
408 stmt_may_be_vtbl_ptr_store (gimple stmt)
409 {
410 if (is_gimple_call (stmt))
411 return false;
412 else if (is_gimple_assign (stmt))
413 {
414 tree lhs = gimple_assign_lhs (stmt);
415
416 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
417 {
418 if (flag_strict_aliasing
419 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
420 return false;
421
422 if (TREE_CODE (lhs) == COMPONENT_REF
423 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
424 return false;
425 /* In the future we might want to use get_base_ref_and_offset to find
426 if there is a field corresponding to the offset and if so, proceed
427 almost as if it were a component ref. */
428 }
429 }
430 return true;
431 }
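/* To make the assumptions above concrete, a hedged C++ sketch (illustrative
   only, not from the original sources) of the constructor stages the comment
   describes:

     struct Base
     {
       virtual ~Base () {}
     };

     struct Derived : Base
     {
       Derived ()               // 1) Base's constructor has already run;
       {                        // 2) the VMT pointer now points to
                                //    Derived's table;
         setup ();              // 3) only now does user code run and may
       }                        //    call virtual functions.
       virtual void setup () {}
     };

   While walking backwards from a statement in stage 3, calls can therefore
   be ignored: none of them can be an ancestor constructor or destructor.  */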
432
433 /* If STMT can be proved to be an assignment to the virtual method table
434 pointer of the object described by TCI and the type associated with the
435 new table identified, return the type. Otherwise return NULL_TREE. */
436
437 static tree
438 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
439 {
440 HOST_WIDE_INT offset, size, max_size;
441 tree lhs, rhs, base;
442
443 if (!gimple_assign_single_p (stmt))
444 return NULL_TREE;
445
446 lhs = gimple_assign_lhs (stmt);
447 rhs = gimple_assign_rhs1 (stmt);
448 if (TREE_CODE (lhs) != COMPONENT_REF
449 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
450 || TREE_CODE (rhs) != ADDR_EXPR)
451 return NULL_TREE;
452 rhs = get_base_address (TREE_OPERAND (rhs, 0));
453 if (!rhs
454 || TREE_CODE (rhs) != VAR_DECL
455 || !DECL_VIRTUAL_P (rhs))
456 return NULL_TREE;
457
458 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
459 if (offset != tci->offset
460 || size != POINTER_SIZE
461 || max_size != POINTER_SIZE)
462 return NULL_TREE;
463 if (TREE_CODE (base) == MEM_REF)
464 {
465 if (TREE_CODE (tci->object) != MEM_REF
466 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
467 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
468 TREE_OPERAND (base, 1)))
469 return NULL_TREE;
470 }
471 else if (tci->object != base)
472 return NULL_TREE;
473
474 return DECL_CONTEXT (rhs);
475 }
476
477 /* Callback of walk_aliased_vdefs and a helper function for
478 detect_type_change to check whether a particular statement may modify
479 the virtual table pointer, and if possible also determine the new type of
480 the (sub-)object. It stores its result into DATA, which points to a
481 type_change_info structure. */
482
483 static bool
484 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
485 {
486 gimple stmt = SSA_NAME_DEF_STMT (vdef);
487 struct type_change_info *tci = (struct type_change_info *) data;
488
489 if (stmt_may_be_vtbl_ptr_store (stmt))
490 {
491 tree type;
492 type = extr_type_from_vtbl_ptr_store (stmt, tci);
493 if (tci->type_maybe_changed
494 && type != tci->known_current_type)
495 tci->multiple_types_encountered = true;
496 tci->known_current_type = type;
497 tci->type_maybe_changed = true;
498 return true;
499 }
500 else
501 return false;
502 }
503
504
505
506 /* Like detect_type_change but with extra argument COMP_TYPE which will become
507 the component type part of the new JFUNC if a dynamic type change is
508 detected and the new base type is identified. */
509
510 static bool
511 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
512 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
513 {
514 struct type_change_info tci;
515 ao_ref ao;
516
517 gcc_checking_assert (DECL_P (arg)
518 || TREE_CODE (arg) == MEM_REF
519 || handled_component_p (arg));
520 /* Const calls cannot call virtual methods through VMT and so type changes do
521 not matter. */
522 if (!flag_devirtualize || !gimple_vuse (call))
523 return false;
524
525 ao_ref_init (&ao, arg);
526 ao.base = base;
527 ao.offset = offset;
528 ao.size = POINTER_SIZE;
529 ao.max_size = ao.size;
530
531 tci.offset = offset;
532 tci.object = get_base_address (arg);
533 tci.known_current_type = NULL_TREE;
534 tci.type_maybe_changed = false;
535 tci.multiple_types_encountered = false;
536
537 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
538 &tci, NULL);
539 if (!tci.type_maybe_changed)
540 return false;
541
542 if (!tci.known_current_type
543 || tci.multiple_types_encountered
544 || offset != 0)
545 jfunc->type = IPA_JF_UNKNOWN;
546 else
547 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
548
549 return true;
550 }
551
552 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
553 looking for assignments to its virtual table pointer. If it has, return true
554 and fill in the jump function JFUNC with relevant type information or set it
555 to unknown. ARG is the object itself (not a pointer to it, unless
556 dereferenced). BASE is the base of the memory access as returned by
557 get_ref_base_and_extent, as is the offset. */
558
559 static bool
560 detect_type_change (tree arg, tree base, gimple call,
561 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
562 {
563 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
564 }
565
566 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
567 SSA name (its dereference will become the base and the offset is assumed to
568 be zero). */
569
570 static bool
571 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
572 {
573 tree comp_type;
574
575 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
576 if (!flag_devirtualize
577 || !POINTER_TYPE_P (TREE_TYPE (arg))
578 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
579 return false;
580
581 comp_type = TREE_TYPE (TREE_TYPE (arg));
582 arg = build2 (MEM_REF, ptr_type_node, arg,
583 build_int_cst (ptr_type_node, 0));
584
585 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
586 }
587
588 /* Callback of walk_aliased_vdefs. Sets the boolean variable pointed to by
589 DATA to flag that it has been invoked. */
590
591 static bool
592 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
593 void *data)
594 {
595 bool *b = (bool *) data;
596 *b = true;
597 return true;
598 }
599
600 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
601 a value known not to be modified in this function before reaching the
602 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
603 information about the parameter. */
604
605 static bool
606 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
607 gimple stmt, tree parm_load)
608 {
609 bool modified = false;
610 bitmap *visited_stmts;
611 ao_ref refd;
612
613 if (parm_ainfo && parm_ainfo->parm_modified)
614 return false;
615
616 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
617 ao_ref_init (&refd, parm_load);
618 /* We can cache visited statements only when parm_ainfo is available and when
619 we are looking at a naked load of the whole parameter. */
620 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
621 visited_stmts = NULL;
622 else
623 visited_stmts = &parm_ainfo->parm_visited_statements;
624 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
625 visited_stmts);
626 if (parm_ainfo && modified)
627 parm_ainfo->parm_modified = true;
628 return !modified;
629 }
630
631 /* If STMT is an assignment that loads a value from a parameter declaration,
632 return the index of the parameter in ipa_node_params, provided that the
633 parameter is not modified before STMT. Otherwise return -1. */
634
635 static int
636 load_from_unmodified_param (struct ipa_node_params *info,
637 struct param_analysis_info *parms_ainfo,
638 gimple stmt)
639 {
640 int index;
641 tree op1;
642
643 if (!gimple_assign_single_p (stmt))
644 return -1;
645
646 op1 = gimple_assign_rhs1 (stmt);
647 if (TREE_CODE (op1) != PARM_DECL)
648 return -1;
649
650 index = ipa_get_param_decl_index (info, op1);
651 if (index < 0
652 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
653 : NULL, stmt, op1))
654 return -1;
655
656 return index;
657 }
658
659 /* Return true if memory reference REF loads data that are known to be
660 unmodified in this function before reaching statement STMT. PARM_AINFO, if
661 non-NULL, is a pointer to a structure containing temporary information about
662 PARM. */
663
664 static bool
665 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
666 gimple stmt, tree ref)
667 {
668 bool modified = false;
669 ao_ref refd;
670
671 gcc_checking_assert (gimple_vuse (stmt));
672 if (parm_ainfo && parm_ainfo->ref_modified)
673 return false;
674
675 ao_ref_init (&refd, ref);
676 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
677 NULL);
678 if (parm_ainfo && modified)
679 parm_ainfo->ref_modified = true;
680 return !modified;
681 }
682
683 /* Return true if the data pointed to by PARM is known to be unmodified in this
684 function before reaching call statement CALL into which it is passed.
685 PARM_AINFO is a pointer to a structure containing temporary information
686 about PARM. */
687
688 static bool
689 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
690 gimple call, tree parm)
691 {
692 bool modified = false;
693 ao_ref refd;
694
695 /* It's unnecessary to calculate anything about memory contents for a const
696 function because it is not going to use it. But do not cache the result
697 either. Also, no such calculations for non-pointers. */
698 if (!gimple_vuse (call)
699 || !POINTER_TYPE_P (TREE_TYPE (parm)))
700 return false;
701
702 if (parm_ainfo->pt_modified)
703 return false;
704
705 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
706 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
707 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
708 if (modified)
709 parm_ainfo->pt_modified = true;
710 return !modified;
711 }
712
713 /* Return true if we can prove that OP is a memory reference loading unmodified
714 data from an aggregate passed as a parameter and if the aggregate is passed
715 by reference, that the alias type of the load corresponds to the type of the
716 formal parameter (so that we can rely on this type for TBAA in callers).
717 INFO and PARMS_AINFO describe parameters of the current function (but the
718 latter can be NULL), STMT is the load statement. If function returns true,
719 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
720 within the aggregate and whether it is a load from a value passed by
721 reference respectively. */
722
723 static bool
724 ipa_load_from_parm_agg_1 (struct ipa_node_params *info,
725 struct param_analysis_info *parms_ainfo, gimple stmt,
726 tree op, int *index_p, HOST_WIDE_INT *offset_p,
727 bool *by_ref_p)
728 {
729 int index;
730 HOST_WIDE_INT size, max_size;
731 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
732
733 if (max_size == -1 || max_size != size || *offset_p < 0)
734 return false;
735
736 if (DECL_P (base))
737 {
738 int index = ipa_get_param_decl_index (info, base);
739 if (index >= 0
740 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
741 : NULL, stmt, op))
742 {
743 *index_p = index;
744 *by_ref_p = false;
745 return true;
746 }
747 return false;
748 }
749
750 if (TREE_CODE (base) != MEM_REF
751 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
752 || !integer_zerop (TREE_OPERAND (base, 1)))
753 return false;
754
755 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
756 {
757 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
758 index = ipa_get_param_decl_index (info, parm);
759 }
760 else
761 {
762 /* This branch catches situations where a pointer parameter is not a
763 gimple register, for example:
764
765 void hip7(S*) (struct S * p)
766 {
767 void (*<T2e4>) (struct S *) D.1867;
768 struct S * p.1;
769
770 <bb 2>:
771 p.1_1 = p;
772 D.1867_2 = p.1_1->f;
773 D.1867_2 ();
774 gdp = &p;
775 */
776
777 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
778 index = load_from_unmodified_param (info, parms_ainfo, def);
779 }
780
781 if (index >= 0
782 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
783 stmt, op))
784 {
785 *index_p = index;
786 *by_ref_p = true;
787 return true;
788 }
789 return false;
790 }
791
792 /* Just like the previous function, only without the param_analysis_info
793 pointer, for users outside of this file. */
794
795 bool
796 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
797 tree op, int *index_p, HOST_WIDE_INT *offset_p,
798 bool *by_ref_p)
799 {
800 return ipa_load_from_parm_agg_1 (info, NULL, stmt, op, index_p, offset_p,
801 by_ref_p);
802 }
803
804 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
805 of an assignment statement STMT, try to determine whether we are actually
806 handling any of the following cases and construct an appropriate jump
807 function into JFUNC if so:
808
809 1) The passed value is loaded from a formal parameter which is not a gimple
810 register (most probably because it is addressable, the value has to be
811 scalar) and we can guarantee the value has not changed. This case can
812 therefore be described by a simple pass-through jump function. For example:
813
814 foo (int a)
815 {
816 int a.0;
817
818 a.0_2 = a;
819 bar (a.0_2);
820
821 2) The passed value can be described by a simple arithmetic pass-through
822 jump function. E.g.
823
824 foo (int a)
825 {
826 int D.2064;
827
828 D.2064_4 = a.1(D) + 4;
829 bar (D.2064_4);
830
831 This case can also occur in combination with the previous one, e.g.:
832
833 foo (int a, int z)
834 {
835 int a.0;
836 int D.2064;
837
838 a.0_3 = a;
839 D.2064_4 = a.0_3 + 4;
840 foo (D.2064_4);
841
842 3) The passed value is an address of an object within another one (which
843 is also passed by reference). Such situations are described by an
844 ancestor jump function, for example:
845
846 B::foo() (struct B * const this)
847 {
848 struct A * D.1845;
849
850 D.1845_2 = &this_1(D)->D.1748;
851 A::bar (D.1845_2);
852
853 INFO is the structure describing individual parameters across different
854 stages of IPA optimizations. PARMS_AINFO contains the information that is
855 only needed for intraprocedural analysis. */
856
857 static void
858 compute_complex_assign_jump_func (struct ipa_node_params *info,
859 struct param_analysis_info *parms_ainfo,
860 struct ipa_jump_func *jfunc,
861 gimple call, gimple stmt, tree name)
862 {
863 HOST_WIDE_INT offset, size, max_size;
864 tree op1, tc_ssa, base, ssa;
865 int index;
866
867 op1 = gimple_assign_rhs1 (stmt);
868
869 if (TREE_CODE (op1) == SSA_NAME)
870 {
871 if (SSA_NAME_IS_DEFAULT_DEF (op1))
872 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
873 else
874 index = load_from_unmodified_param (info, parms_ainfo,
875 SSA_NAME_DEF_STMT (op1));
876 tc_ssa = op1;
877 }
878 else
879 {
880 index = load_from_unmodified_param (info, parms_ainfo, stmt);
881 tc_ssa = gimple_assign_lhs (stmt);
882 }
883
884 if (index >= 0)
885 {
886 tree op2 = gimple_assign_rhs2 (stmt);
887
888 if (op2)
889 {
890 if (!is_gimple_ip_invariant (op2)
891 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
892 && !useless_type_conversion_p (TREE_TYPE (name),
893 TREE_TYPE (op1))))
894 return;
895
896 ipa_set_jf_arith_pass_through (jfunc, index, op2,
897 gimple_assign_rhs_code (stmt));
898 }
899 else if (gimple_assign_single_p (stmt)
900 && !detect_type_change_ssa (tc_ssa, call, jfunc))
901 {
902 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
903 call, tc_ssa);
904 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
905 }
906 return;
907 }
908
909 if (TREE_CODE (op1) != ADDR_EXPR)
910 return;
911 op1 = TREE_OPERAND (op1, 0);
912 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
913 return;
914 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
915 if (TREE_CODE (base) != MEM_REF
916 /* If this is a varying address, punt. */
917 || max_size == -1
918 || max_size != size)
919 return;
920 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
921 ssa = TREE_OPERAND (base, 0);
922 if (TREE_CODE (ssa) != SSA_NAME
923 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
924 || offset < 0)
925 return;
926
927 /* Dynamic types are changed only in constructors and destructors. */
928 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
929 if (index >= 0
930 && !detect_type_change (op1, base, call, jfunc, offset))
931 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
932 parm_ref_data_pass_through_p (&parms_ainfo[index],
933 call, ssa));
934 }
935
936 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
937 it looks like:
938
939 iftmp.1_3 = &obj_2(D)->D.1762;
940
941 The base of the MEM_REF must be a default definition SSA NAME of a
942 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
943 whole MEM_REF expression is returned and the offset calculated from any
944 handled components and the MEM_REF itself is stored into *OFFSET. The whole
945 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
946
947 static tree
948 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
949 {
950 HOST_WIDE_INT size, max_size;
951 tree expr, parm, obj;
952
953 if (!gimple_assign_single_p (assign))
954 return NULL_TREE;
955 expr = gimple_assign_rhs1 (assign);
956
957 if (TREE_CODE (expr) != ADDR_EXPR)
958 return NULL_TREE;
959 expr = TREE_OPERAND (expr, 0);
960 obj = expr;
961 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
962
963 if (TREE_CODE (expr) != MEM_REF
964 /* If this is a varying address, punt. */
965 || max_size == -1
966 || max_size != size
967 || *offset < 0)
968 return NULL_TREE;
969 parm = TREE_OPERAND (expr, 0);
970 if (TREE_CODE (parm) != SSA_NAME
971 || !SSA_NAME_IS_DEFAULT_DEF (parm)
972 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
973 return NULL_TREE;
974
975 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
976 *obj_p = obj;
977 return expr;
978 }
979
980
981 /* Given that an actual argument is an SSA_NAME that is a result of a phi
982 statement PHI, try to find out whether it is in fact a
983 multiple-inheritance typecast from a descendant into an ancestor of a formal
984 parameter and thus can be described by an ancestor jump function and if so,
985 write the appropriate function into JFUNC.
986
987 Essentially we want to match the following pattern:
988
989 if (obj_2(D) != 0B)
990 goto <bb 3>;
991 else
992 goto <bb 4>;
993
994 <bb 3>:
995 iftmp.1_3 = &obj_2(D)->D.1762;
996
997 <bb 4>:
998 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
999 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1000 return D.1879_6; */
1001
1002 static void
1003 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1004 struct param_analysis_info *parms_ainfo,
1005 struct ipa_jump_func *jfunc,
1006 gimple call, gimple phi)
1007 {
1008 HOST_WIDE_INT offset;
1009 gimple assign, cond;
1010 basic_block phi_bb, assign_bb, cond_bb;
1011 tree tmp, parm, expr, obj;
1012 int index, i;
1013
1014 if (gimple_phi_num_args (phi) != 2)
1015 return;
1016
1017 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1018 tmp = PHI_ARG_DEF (phi, 0);
1019 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1020 tmp = PHI_ARG_DEF (phi, 1);
1021 else
1022 return;
1023 if (TREE_CODE (tmp) != SSA_NAME
1024 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1025 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1026 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1027 return;
1028
1029 assign = SSA_NAME_DEF_STMT (tmp);
1030 assign_bb = gimple_bb (assign);
1031 if (!single_pred_p (assign_bb))
1032 return;
1033 expr = get_ancestor_addr_info (assign, &obj, &offset);
1034 if (!expr)
1035 return;
1036 parm = TREE_OPERAND (expr, 0);
1037 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1038 gcc_assert (index >= 0);
1039
1040 cond_bb = single_pred (assign_bb);
1041 cond = last_stmt (cond_bb);
1042 if (!cond
1043 || gimple_code (cond) != GIMPLE_COND
1044 || gimple_cond_code (cond) != NE_EXPR
1045 || gimple_cond_lhs (cond) != parm
1046 || !integer_zerop (gimple_cond_rhs (cond)))
1047 return;
1048
1049 phi_bb = gimple_bb (phi);
1050 for (i = 0; i < 2; i++)
1051 {
1052 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1053 if (pred != assign_bb && pred != cond_bb)
1054 return;
1055 }
1056
1057 if (!detect_type_change (obj, expr, call, jfunc, offset))
1058 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1059 parm_ref_data_pass_through_p (&parms_ainfo[index],
1060 call, parm));
1061 }
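/* A hedged sketch of source code producing the PHI pattern matched above
   (illustrative only; middleman and caller are made-up names). Converting
   a pointer across multiple inheritance must preserve NULL, so GIMPLE
   guards the this-adjustment with the comparison against zero shown in the
   comment before this function:

     struct A { virtual ~A () {} int x; };
     struct B { virtual ~B () {} int y; };
     struct C : A, B { };

     int middleman (B *b, int i);

     int
     caller (C *obj, int i)
     {
       return middleman (obj, i);   // implicit C* -> B* adjustment
     }
*/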
1062
1063 /* Given OP which is passed as an actual argument to a called function,
1064 determine if it is possible to construct a KNOWN_TYPE jump function for it
1065 and if so, create one and store it to JFUNC. */
1066
1067 static void
1068 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1069 gimple call)
1070 {
1071 HOST_WIDE_INT offset, size, max_size;
1072 tree base;
1073
1074 if (!flag_devirtualize
1075 || TREE_CODE (op) != ADDR_EXPR
1076 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1077 return;
1078
1079 op = TREE_OPERAND (op, 0);
1080 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1081 if (!DECL_P (base)
1082 || max_size == -1
1083 || max_size != size
1084 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1085 || is_global_var (base))
1086 return;
1087
1088 if (!TYPE_BINFO (TREE_TYPE (base))
1089 || detect_type_change (op, base, call, jfunc, offset))
1090 return;
1091
1092 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1093 }
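/* The situation recognized above, as a hedged sketch (illustrative only;
   take and known_type_site are made-up names): the address of an automatic
   object whose dynamic type is statically known is passed to a callee:

     struct W
     {
       virtual int get () { return 1; }
     };

     int take (W *);

     int
     known_type_site (void)
     {
       W w;
       return take (&w);   // argument gets IPA_JF_KNOWN_TYPE for W
     }
*/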
1094
1095 /* Inspect the given TYPE and return true iff it has the same structure (the
1096 same number of fields of the same types) as a C++ member pointer. If
1097 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1098 corresponding fields there. */
1099
1100 static bool
1101 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1102 {
1103 tree fld;
1104
1105 if (TREE_CODE (type) != RECORD_TYPE)
1106 return false;
1107
1108 fld = TYPE_FIELDS (type);
1109 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1110 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1111 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1112 return false;
1113
1114 if (method_ptr)
1115 *method_ptr = fld;
1116
1117 fld = DECL_CHAIN (fld);
1118 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1119 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1120 return false;
1121 if (delta)
1122 *delta = fld;
1123
1124 if (DECL_CHAIN (fld))
1125 return false;
1126
1127 return true;
1128 }
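/* The record shape matched above is essentially the lowered form of a C++
   pointer to member function. Roughly, as a hedged sketch (field names are
   illustrative; the real first field has a pointer-to-METHOD_TYPE type):

     struct S;
     struct lowered_member_ptr
     {
       int (*__pfn) (struct S *, int);   // pointer to the method, or,
                                         // depending on the ABI, an odd
                                         // vtable index
       long __delta;                     // adjustment of the this pointer
     };
*/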
1129
1130 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1131 return the rhs of its defining statement. Otherwise return RHS as it
1132 is. */
1133
1134 static inline tree
1135 get_ssa_def_if_simple_copy (tree rhs)
1136 {
1137 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1138 {
1139 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1140
1141 if (gimple_assign_single_p (def_stmt))
1142 rhs = gimple_assign_rhs1 (def_stmt);
1143 else
1144 break;
1145 }
1146 return rhs;
1147 }
1148
1149 /* Simple linked list, describing known contents of an aggregate before the
1150 call. */
1151
1152 struct ipa_known_agg_contents_list
1153 {
1154 /* Offset and size of the described part of the aggregate. */
1155 HOST_WIDE_INT offset, size;
1156 /* Known constant value or NULL if the contents is known to be unknown. */
1157 tree constant;
1158 /* Pointer to the next structure in the list. */
1159 struct ipa_known_agg_contents_list *next;
1160 };
1161
1162 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1163 in ARG is filled in with constant values. ARG can either be an aggregate
1164 expression or a pointer to an aggregate. JFUNC is the jump function into
1165 which the constants are subsequently stored. */
1166
1167 static void
1168 determine_known_aggregate_parts (gimple call, tree arg,
1169 struct ipa_jump_func *jfunc)
1170 {
1171 struct ipa_known_agg_contents_list *list = NULL;
1172 int item_count = 0, const_count = 0;
1173 HOST_WIDE_INT arg_offset, arg_size;
1174 gimple_stmt_iterator gsi;
1175 tree arg_base;
1176 bool check_ref, by_ref;
1177 ao_ref r;
1178
1179 /* The function operates in three stages. First, we prepare check_ref, r,
1180 arg_base and arg_offset based on what is passed as the actual
1181 argument. */
1182
1183 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1184 {
1185 by_ref = true;
1186 if (TREE_CODE (arg) == SSA_NAME)
1187 {
1188 tree type_size;
1189 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1190 return;
1191 check_ref = true;
1192 arg_base = arg;
1193 arg_offset = 0;
1194 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1195 arg_size = tree_low_cst (type_size, 1);
1196 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1197 }
1198 else if (TREE_CODE (arg) == ADDR_EXPR)
1199 {
1200 HOST_WIDE_INT arg_max_size;
1201
1202 arg = TREE_OPERAND (arg, 0);
1203 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1204 &arg_max_size);
1205 if (arg_max_size == -1
1206 || arg_max_size != arg_size
1207 || arg_offset < 0)
1208 return;
1209 if (DECL_P (arg_base))
1210 {
1211 tree size;
1212 check_ref = false;
1213 size = build_int_cst (integer_type_node, arg_size);
1214 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1215 }
1216 else
1217 return;
1218 }
1219 else
1220 return;
1221 }
1222 else
1223 {
1224 HOST_WIDE_INT arg_max_size;
1225
1226 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1227
1228 by_ref = false;
1229 check_ref = false;
1230 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1231 &arg_max_size);
1232 if (arg_max_size == -1
1233 || arg_max_size != arg_size
1234 || arg_offset < 0)
1235 return;
1236
1237 ao_ref_init (&r, arg);
1238 }
1239
1240 /* Second stage walks back the BB, looks at individual statements and as long
1241 as it is confident of how the statements affect contents of the
1242 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1243 structures describing it. */
1244 gsi = gsi_for_stmt (call);
1245 gsi_prev (&gsi);
1246 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1247 {
1248 struct ipa_known_agg_contents_list *n, **p;
1249 gimple stmt = gsi_stmt (gsi);
1250 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1251 tree lhs, rhs, lhs_base;
1252 bool partial_overlap;
1253
1254 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1255 continue;
1256 if (!gimple_assign_single_p (stmt))
1257 break;
1258
1259 lhs = gimple_assign_lhs (stmt);
1260 rhs = gimple_assign_rhs1 (stmt);
1261 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
1262 break;
1263
1264 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1265 &lhs_max_size);
1266 if (lhs_max_size == -1
1267 || lhs_max_size != lhs_size
1268 || (lhs_offset < arg_offset
1269 && lhs_offset + lhs_size > arg_offset)
1270 || (lhs_offset < arg_offset + arg_size
1271 && lhs_offset + lhs_size > arg_offset + arg_size))
1272 break;
1273
1274 if (check_ref)
1275 {
1276 if (TREE_CODE (lhs_base) != MEM_REF
1277 || TREE_OPERAND (lhs_base, 0) != arg_base
1278 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1279 break;
1280 }
1281 else if (lhs_base != arg_base)
1282 break;
1283
1284 if (lhs_offset + lhs_size < arg_offset
1285 || lhs_offset >= (arg_offset + arg_size))
1286 continue;
1287
1288 partial_overlap = false;
1289 p = &list;
1290 while (*p && (*p)->offset < lhs_offset)
1291 {
1292 if ((*p)->offset + (*p)->size > lhs_offset)
1293 {
1294 partial_overlap = true;
1295 break;
1296 }
1297 p = &(*p)->next;
1298 }
1299 if (partial_overlap)
1300 break;
1301 if (*p && (*p)->offset < lhs_offset + lhs_size)
1302 {
1303 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1304 /* We already know this value is subsequently overwritten with
1305 something else. */
1306 continue;
1307 else
1308 /* Otherwise this is a partial overlap which we cannot
1309 represent. */
1310 break;
1311 }
1312
1313 rhs = get_ssa_def_if_simple_copy (rhs);
1314 n = XALLOCA (struct ipa_known_agg_contents_list);
1315 n->size = lhs_size;
1316 n->offset = lhs_offset;
1317 if (is_gimple_ip_invariant (rhs))
1318 {
1319 n->constant = rhs;
1320 const_count++;
1321 }
1322 else
1323 n->constant = NULL_TREE;
1324 n->next = *p;
1325 *p = n;
1326
1327 item_count++;
1328 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1329 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1330 break;
1331 }
1332
1333 /* Third stage just goes over the list and creates an appropriate vector of
1334 ipa_agg_jf_item structures out of it, of course only if there are
1335 any known constants to begin with. */
1336
1337 if (const_count)
1338 {
1339 jfunc->agg.by_ref = by_ref;
1340 jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
1341 while (list)
1342 {
1343 if (list->constant)
1344 {
1345 struct ipa_agg_jf_item item;
1346 item.offset = list->offset - arg_offset;
1347 item.value = list->constant;
1348 VEC_quick_push (ipa_agg_jf_item_t, jfunc->agg.items, item);
1349 }
1350 list = list->next;
1351 }
1352 }
1353 }
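/* A hedged example of a call site the three stages above handle
   (illustrative only; params, consume and call_site are made-up names):

     struct params { int a; int b; };
     int consume (struct params *p);

     int
     call_site (void)
     {
       struct params p;
       p.a = 1;               // recorded: offset 0, constant 1
       p.b = 2;               // recorded: offset of b, constant 2
       return consume (&p);   // jfunc->agg gets two items, by_ref set
     }

   Walking backwards from the call, both stores are visited before anything
   that clobbers p, so two constant ipa_agg_jf_item entries are created.  */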
1354
1355 /* Compute jump functions for all arguments of callsite CS and insert the
1356 information in the jump_functions array in the ipa_edge_args corresponding
1357 to this callsite. */
1358
1359 static void
1360 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1361 struct cgraph_edge *cs)
1362 {
1363 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1364 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1365 gimple call = cs->call_stmt;
1366 int n, arg_num = gimple_call_num_args (call);
1367
1368 if (arg_num == 0 || args->jump_functions)
1369 return;
1370 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);
1371
1372 for (n = 0; n < arg_num; n++)
1373 {
1374 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1375 tree arg = gimple_call_arg (call, n);
1376
1377 if (is_gimple_ip_invariant (arg))
1378 ipa_set_jf_constant (jfunc, arg);
1379 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1380 && TREE_CODE (arg) == PARM_DECL)
1381 {
1382 int index = ipa_get_param_decl_index (info, arg);
1383
1384 gcc_assert (index >= 0);
1385 /* Aggregate passed by value, check for pass-through, otherwise we
1386 will attempt to fill in aggregate contents later in this
1387 loop. */
1388 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1389 {
1390 ipa_set_jf_simple_pass_through (jfunc, index, false);
1391 continue;
1392 }
1393 }
1394 else if (TREE_CODE (arg) == SSA_NAME)
1395 {
1396 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1397 {
1398 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1399 if (index >= 0
1400 && !detect_type_change_ssa (arg, call, jfunc))
1401 {
1402 bool agg_p;
1403 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1404 call, arg);
1405 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1406 }
1407 }
1408 else
1409 {
1410 gimple stmt = SSA_NAME_DEF_STMT (arg);
1411 if (is_gimple_assign (stmt))
1412 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1413 call, stmt, arg);
1414 else if (gimple_code (stmt) == GIMPLE_PHI)
1415 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1416 call, stmt);
1417 }
1418 }
1419 else
1420 compute_known_type_jump_func (arg, jfunc, call);
1421
1422 if ((jfunc->type != IPA_JF_PASS_THROUGH
1423 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1424 && (jfunc->type != IPA_JF_ANCESTOR
1425 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1426 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1427 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1428 determine_known_aggregate_parts (call, arg, jfunc);
1429 }
1430 }
1431
1432 /* Compute jump functions for all edges - both direct and indirect - outgoing
1433 from NODE. Also count the actual arguments in the process. */
1434
1435 static void
1436 ipa_compute_jump_functions (struct cgraph_node *node,
1437 struct param_analysis_info *parms_ainfo)
1438 {
1439 struct cgraph_edge *cs;
1440
1441 for (cs = node->callees; cs; cs = cs->next_callee)
1442 {
1443 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1444 NULL);
1445 /* We do not need to bother analyzing calls to unknown
1446 functions unless they may become known during lto/whopr. */
1447 if (!callee->analyzed && !flag_lto)
1448 continue;
1449 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1450 }
1451
1452 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1453 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1454 }
1455
1456 /* If STMT looks like a statement loading a value from a member pointer formal
1457 parameter, return that parameter and store the offset of the field to
1458 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1459 might be clobbered). If USE_DELTA, then we look for a use of the delta
1460 field rather than the pfn. */
1461
1462 static tree
1463 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1464 HOST_WIDE_INT *offset_p)
1465 {
1466 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1467
1468 if (!gimple_assign_single_p (stmt))
1469 return NULL_TREE;
1470
1471 rhs = gimple_assign_rhs1 (stmt);
1472 if (TREE_CODE (rhs) == COMPONENT_REF)
1473 {
1474 ref_field = TREE_OPERAND (rhs, 1);
1475 rhs = TREE_OPERAND (rhs, 0);
1476 }
1477 else
1478 ref_field = NULL_TREE;
1479 if (TREE_CODE (rhs) != MEM_REF)
1480 return NULL_TREE;
1481 rec = TREE_OPERAND (rhs, 0);
1482 if (TREE_CODE (rec) != ADDR_EXPR)
1483 return NULL_TREE;
1484 rec = TREE_OPERAND (rec, 0);
1485 if (TREE_CODE (rec) != PARM_DECL
1486 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1487 return NULL_TREE;
1488 ref_offset = TREE_OPERAND (rhs, 1);
1489
1490 if (use_delta)
1491 fld = delta_field;
1492 else
1493 fld = ptr_field;
1494 if (offset_p)
1495 *offset_p = int_bit_position (fld);
1496
1497 if (ref_field)
1498 {
1499 if (integer_nonzerop (ref_offset))
1500 return NULL_TREE;
1501 return ref_field == fld ? rec : NULL_TREE;
1502 }
1503 else
1504 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1505 : NULL_TREE;
1506 }
1507
1508 /* Returns true iff T is an SSA_NAME defined by a statement. */
1509
1510 static bool
1511 ipa_is_ssa_with_stmt_def (tree t)
1512 {
1513 if (TREE_CODE (t) == SSA_NAME
1514 && !SSA_NAME_IS_DEFAULT_DEF (t))
1515 return true;
1516 else
1517 return false;
1518 }
1519
1520 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1521 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1522 indirect call graph edge. */
1523
1524 static struct cgraph_edge *
1525 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1526 {
1527 struct cgraph_edge *cs;
1528
1529 cs = cgraph_edge (node, stmt);
1530 cs->indirect_info->param_index = param_index;
1531 cs->indirect_info->offset = 0;
1532 cs->indirect_info->polymorphic = 0;
1533 cs->indirect_info->agg_contents = 0;
1534 return cs;
1535 }
1536
1537 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1538 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1539 intermediate information about each formal parameter. Currently it checks
1540 whether the call calls a pointer that is a formal parameter and if so, the
1541 parameter is marked with the called flag and an indirect call graph edge
1542 describing the call is created. This is very simple for ordinary pointers
1543 represented in SSA but not-so-nice when it comes to member pointers. The
1544 ugly part of this function does nothing more than trying to match the
1545 pattern of such a call. An example of such a pattern is the gimple dump
1546 below, the call is on the last line:
1547
1548 <bb 2>:
1549 f$__delta_5 = f.__delta;
1550 f$__pfn_24 = f.__pfn;
1551
1552 or
1553 <bb 2>:
1554 f$__delta_5 = MEM[(struct *)&f];
1555 f$__pfn_24 = MEM[(struct *)&f + 4B];
1556
1557 and a few lines below:
1558
1559 <bb 5>
1560 D.2496_3 = (int) f$__pfn_24;
1561 D.2497_4 = D.2496_3 & 1;
1562 if (D.2497_4 != 0)
1563 goto <bb 3>;
1564 else
1565 goto <bb 4>;
1566
1567 <bb 6>:
1568 D.2500_7 = (unsigned int) f$__delta_5;
1569 D.2501_8 = &S + D.2500_7;
1570 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1571 D.2503_10 = *D.2502_9;
1572 D.2504_12 = f$__pfn_24 + -1;
1573 D.2505_13 = (unsigned int) D.2504_12;
1574 D.2506_14 = D.2503_10 + D.2505_13;
1575 D.2507_15 = *D.2506_14;
1576 iftmp.11_16 = (String:: *) D.2507_15;
1577
1578 <bb 7>:
1579 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1580 D.2500_19 = (unsigned int) f$__delta_5;
1581 D.2508_20 = &S + D.2500_19;
1582 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1583
1584 Such patterns are results of simple calls to a member pointer:
1585
1586 int doprinting (int (MyString::* f)(int) const)
1587 {
1588 MyString S ("somestring");
1589
1590 return (S.*f)(4);
1591 }
1592
1593 Moreover, the function also looks for called pointers loaded from aggregates
1594 passed by value or reference. */
1595
1596 static void
1597 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1598 struct ipa_node_params *info,
1599 struct param_analysis_info *parms_ainfo,
1600 gimple call, tree target)
1601 {
1602 gimple def;
1603 tree n1, n2;
1604 gimple d1, d2;
1605 tree rec, rec2, cond;
1606 gimple branch;
1607 int index;
1608 basic_block bb, virt_bb, join;
1609 HOST_WIDE_INT offset;
1610 bool by_ref;
1611
1612 if (SSA_NAME_IS_DEFAULT_DEF (target))
1613 {
1614 tree var = SSA_NAME_VAR (target);
1615 index = ipa_get_param_decl_index (info, var);
1616 if (index >= 0)
1617 ipa_note_param_call (node, index, call);
1618 return;
1619 }
1620
1621 def = SSA_NAME_DEF_STMT (target);
1622 if (gimple_assign_single_p (def)
1623 && ipa_load_from_parm_agg_1 (info, parms_ainfo, def,
1624 gimple_assign_rhs1 (def), &index, &offset,
1625 &by_ref))
1626 {
1627 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1628 cs->indirect_info->offset = offset;
1629 cs->indirect_info->agg_contents = 1;
1630 cs->indirect_info->by_ref = by_ref;
1631 return;
1632 }
1633
1634 /* Now we need to try to match the complex pattern of calling a member
1635 pointer. */
1636 if (gimple_code (def) != GIMPLE_PHI
1637 || gimple_phi_num_args (def) != 2
1638 || !POINTER_TYPE_P (TREE_TYPE (target))
1639 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1640 return;
1641
1642 /* First, we need to check whether one of these is a load from a member
1643 pointer that is a parameter to this function. */
1644 n1 = PHI_ARG_DEF (def, 0);
1645 n2 = PHI_ARG_DEF (def, 1);
1646 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1647 return;
1648 d1 = SSA_NAME_DEF_STMT (n1);
1649 d2 = SSA_NAME_DEF_STMT (n2);
1650
1651 join = gimple_bb (def);
1652 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1653 {
1654 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1655 return;
1656
1657 bb = EDGE_PRED (join, 0)->src;
1658 virt_bb = gimple_bb (d2);
1659 }
1660 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1661 {
1662 bb = EDGE_PRED (join, 1)->src;
1663 virt_bb = gimple_bb (d1);
1664 }
1665 else
1666 return;
1667
1668 /* Second, we need to check that the basic blocks are laid out in the way
1669 corresponding to the pattern. */
1670
1671 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1672 || single_pred (virt_bb) != bb
1673 || single_succ (virt_bb) != join)
1674 return;
1675
1676 /* Third, let's see that the branching is done depending on the least
1677 significant bit of the pfn. */
1678
1679 branch = last_stmt (bb);
1680 if (!branch || gimple_code (branch) != GIMPLE_COND)
1681 return;
1682
1683 if ((gimple_cond_code (branch) != NE_EXPR
1684 && gimple_cond_code (branch) != EQ_EXPR)
1685 || !integer_zerop (gimple_cond_rhs (branch)))
1686 return;
1687
1688 cond = gimple_cond_lhs (branch);
1689 if (!ipa_is_ssa_with_stmt_def (cond))
1690 return;
1691
1692 def = SSA_NAME_DEF_STMT (cond);
1693 if (!is_gimple_assign (def)
1694 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1695 || !integer_onep (gimple_assign_rhs2 (def)))
1696 return;
1697
1698 cond = gimple_assign_rhs1 (def);
1699 if (!ipa_is_ssa_with_stmt_def (cond))
1700 return;
1701
1702 def = SSA_NAME_DEF_STMT (cond);
1703
1704 if (is_gimple_assign (def)
1705 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1706 {
1707 cond = gimple_assign_rhs1 (def);
1708 if (!ipa_is_ssa_with_stmt_def (cond))
1709 return;
1710 def = SSA_NAME_DEF_STMT (cond);
1711 }
1712
1713 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1714 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1715 == ptrmemfunc_vbit_in_delta),
1716 NULL);
1717 if (rec != rec2)
1718 return;
1719
1720 index = ipa_get_param_decl_index (info, rec);
1721 if (index >= 0
1722 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1723 {
1724 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1725 cs->indirect_info->offset = offset;
1726 cs->indirect_info->agg_contents = 1;
1727 }
1728
1729 return;
1730 }
1731
1732 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1733 object referenced in the expression is a formal parameter of the caller
1734 (described by INFO), create a call note for the statement. */
1735
1736 static void
1737 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1738 struct ipa_node_params *info, gimple call,
1739 tree target)
1740 {
1741 struct cgraph_edge *cs;
1742 struct cgraph_indirect_call_info *ii;
1743 struct ipa_jump_func jfunc;
1744 tree obj = OBJ_TYPE_REF_OBJECT (target);
1745 int index;
1746 HOST_WIDE_INT anc_offset;
1747
1748 if (!flag_devirtualize)
1749 return;
1750
1751 if (TREE_CODE (obj) != SSA_NAME)
1752 return;
1753
1754 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1755 {
1756 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1757 return;
1758
1759 anc_offset = 0;
1760 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1761 gcc_assert (index >= 0);
1762 if (detect_type_change_ssa (obj, call, &jfunc))
1763 return;
1764 }
1765 else
1766 {
1767 gimple stmt = SSA_NAME_DEF_STMT (obj);
1768 tree expr;
1769
1770 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1771 if (!expr)
1772 return;
1773 index = ipa_get_param_decl_index (info,
1774 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1775 gcc_assert (index >= 0);
1776 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1777 return;
1778 }
1779
1780 cs = ipa_note_param_call (node, index, call);
1781 ii = cs->indirect_info;
1782 ii->offset = anc_offset;
1783 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1784 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1785 ii->polymorphic = 1;
1786 }
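/* The kind of call analyzed above, as a hedged sketch (illustrative only;
   V, hook and virt_caller are made-up names): a virtual call through a
   formal parameter, which is expressed as an OBJ_TYPE_REF whose object is
   the parameter's default definition SSA name:

     struct V
     {
       virtual int hook ();
     };

     int
     virt_caller (V *v)
     {
       return v->hook ();   // indirect edge with param_index 0, polymorphic
     }
*/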
1787
1788 /* Analyze whether and how the call statement CALL utilizes formal parameters
1789 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1790 containing intermediate information about each formal parameter. */
1791
1792 static void
1793 ipa_analyze_call_uses (struct cgraph_node *node,
1794 struct ipa_node_params *info,
1795 struct param_analysis_info *parms_ainfo, gimple call)
1796 {
1797 tree target = gimple_call_fn (call);
1798
1799 if (!target)
1800 return;
1801 if (TREE_CODE (target) == SSA_NAME)
1802 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1803 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1804 ipa_analyze_virtual_call_uses (node, info, call, target);
1805 }
1806
1807
1808 /* Analyze the call statement STMT with respect to formal parameters (described
1809 in INFO) of caller given by NODE. Currently it only checks whether formal
1810 parameters are called. PARMS_AINFO is a pointer to a vector containing
1811 intermediate information about each formal parameter. */
1812
1813 static void
1814 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1815 struct param_analysis_info *parms_ainfo, gimple stmt)
1816 {
1817 if (is_gimple_call (stmt))
1818 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1819 }
1820
1821 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1822 If OP is a parameter declaration, mark it as used in the info structure
1823 passed in DATA. */
1824
1825 static bool
1826 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1827 tree op, void *data)
1828 {
1829 struct ipa_node_params *info = (struct ipa_node_params *) data;
1830
1831 op = get_base_address (op);
1832 if (op
1833 && TREE_CODE (op) == PARM_DECL)
1834 {
1835 int index = ipa_get_param_decl_index (info, op);
1836 gcc_assert (index >= 0);
1837 ipa_set_param_used (info, index, true);
1838 }
1839
1840 return false;
1841 }
1842
1843 /* Scan the function body of NODE and inspect the uses of formal parameters.
1844 Store the findings in various structures of the associated ipa_node_params
1845 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1846 vector containing intermediate information about each formal parameter. */
1847
1848 static void
1849 ipa_analyze_params_uses (struct cgraph_node *node,
1850 struct param_analysis_info *parms_ainfo)
1851 {
1852 tree decl = node->symbol.decl;
1853 basic_block bb;
1854 struct function *func;
1855 gimple_stmt_iterator gsi;
1856 struct ipa_node_params *info = IPA_NODE_REF (node);
1857 int i;
1858
1859 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1860 return;
1861
1862 for (i = 0; i < ipa_get_param_count (info); i++)
1863 {
1864 tree parm = ipa_get_param (info, i);
1865 tree ddef;
1866 /* For SSA regs see if parameter is used. For non-SSA we compute
1867 the flag during modification analysis. */
1868 if (is_gimple_reg (parm)
1869 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1870 parm)) != NULL_TREE
1871 && !has_zero_uses (ddef))
1872 ipa_set_param_used (info, i, true);
1873 }
1874
1875 func = DECL_STRUCT_FUNCTION (decl);
1876 FOR_EACH_BB_FN (bb, func)
1877 {
1878 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1879 {
1880 gimple stmt = gsi_stmt (gsi);
1881
1882 if (is_gimple_debug (stmt))
1883 continue;
1884
1885 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1886 walk_stmt_load_store_addr_ops (stmt, info,
1887 visit_ref_for_mod_analysis,
1888 visit_ref_for_mod_analysis,
1889 visit_ref_for_mod_analysis);
1890 }
1891 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1892 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1893 visit_ref_for_mod_analysis,
1894 visit_ref_for_mod_analysis,
1895 visit_ref_for_mod_analysis);
1896 }
1897
1898 info->uses_analysis_done = 1;
1899 }
1900
1901 /* Initialize the array describing properties of formal parameters
1902 of NODE, analyze their uses and compute jump functions associated
1903 with actual arguments of calls from within NODE. */
1904
1905 void
1906 ipa_analyze_node (struct cgraph_node *node)
1907 {
1908 struct ipa_node_params *info;
1909 struct param_analysis_info *parms_ainfo;
1910 int i, param_count;
1911
1912 ipa_check_create_node_params ();
1913 ipa_check_create_edge_args ();
1914 info = IPA_NODE_REF (node);
1915 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1916 current_function_decl = node->symbol.decl;
1917 ipa_initialize_node_params (node);
1918
1919 param_count = ipa_get_param_count (info);
1920 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1921 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1922
1923 ipa_analyze_params_uses (node, parms_ainfo);
1924 ipa_compute_jump_functions (node, parms_ainfo);
1925
1926 for (i = 0; i < param_count; i++)
1927 {
1928 if (parms_ainfo[i].parm_visited_statements)
1929 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1930 if (parms_ainfo[i].pt_visited_statements)
1931 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1932 }
1933
1934 current_function_decl = NULL;
1935 pop_cfun ();
1936 }
1937
1938
1939 /* Update the jump function DST when the call graph edge corresponding to SRC
1940 is being inlined, knowing that DST is of type ancestor and SRC of known
1941 type. */
1942
1943 static void
1944 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1945 struct ipa_jump_func *dst)
1946 {
1947 HOST_WIDE_INT combined_offset;
1948 tree combined_type;
1949
1950 combined_offset = ipa_get_jf_known_type_offset (src)
1951 + ipa_get_jf_ancestor_offset (dst);
1952 combined_type = ipa_get_jf_ancestor_type (dst);
1953
1954 ipa_set_jf_known_type (dst, combined_offset,
1955 ipa_get_jf_known_type_base_type (src),
1956 combined_type);
1957 }
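/* A worked example of the arithmetic above (offsets are illustrative):
   if SRC describes a known object of some base type B starting at offset
   32 bits and DST is an ancestor jump function with offset 64, the result
   is a known-type jump function with offset 32 + 64 = 96, SRC's base type,
   and DST's ancestor type as the component type.  */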
1958
1959 /* Update the jump functions associated with call graph edge E when the call
1960 graph edge CS is being inlined, assuming that E->caller is already (possibly
1961 indirectly) inlined into CS->callee and that E has not been inlined. */
1962
1963 static void
1964 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1965 struct cgraph_edge *e)
1966 {
1967 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1968 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1969 int count = ipa_get_cs_argument_count (args);
1970 int i;
1971
1972 for (i = 0; i < count; i++)
1973 {
1974 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1975
1976 if (dst->type == IPA_JF_ANCESTOR)
1977 {
1978 struct ipa_jump_func *src;
1979 int dst_fid = dst->value.ancestor.formal_id;
1980
1981 /* Variable number of arguments can cause havoc if we try to access
1982 one that does not exist in the inlined edge. So make sure we
1983 don't. */
1984 if (dst_fid >= ipa_get_cs_argument_count (top))
1985 {
1986 dst->type = IPA_JF_UNKNOWN;
1987 continue;
1988 }
1989
1990 src = ipa_get_ith_jump_func (top, dst_fid);
1991
1992 if (src->agg.items
1993 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
1994 {
1995 struct ipa_agg_jf_item *item;
1996 int j;
1997
1998 /* Currently we do not produce clobber aggregate jump functions;
1999 replace this with merging when we do. */
2000 gcc_assert (!dst->agg.items);
2001
2002 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
2003 dst->agg.by_ref = src->agg.by_ref;
2004 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
2005 item->offset -= dst->value.ancestor.offset;
2006 }
2007
2008 if (src->type == IPA_JF_KNOWN_TYPE)
2009 combine_known_type_and_ancestor_jfs (src, dst);
2010 else if (src->type == IPA_JF_PASS_THROUGH
2011 && src->value.pass_through.operation == NOP_EXPR)
2012 {
2013 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2014 dst->value.ancestor.agg_preserved &=
2015 src->value.pass_through.agg_preserved;
2016 }
2017 else if (src->type == IPA_JF_ANCESTOR)
2018 {
2019 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2020 dst->value.ancestor.offset += src->value.ancestor.offset;
2021 dst->value.ancestor.agg_preserved &=
2022 src->value.ancestor.agg_preserved;
2023 }
2024 else
2025 dst->type = IPA_JF_UNKNOWN;
2026 }
2027 else if (dst->type == IPA_JF_PASS_THROUGH)
2028 {
2029 struct ipa_jump_func *src;
2030 /* We must check range due to calls with variable number of arguments
2031 and we cannot combine jump functions with operations. */
2032 if (dst->value.pass_through.operation == NOP_EXPR
2033 && (dst->value.pass_through.formal_id
2034 < ipa_get_cs_argument_count (top)))
2035 {
2036 bool agg_p;
2037 int dst_fid = dst->value.pass_through.formal_id;
2038 src = ipa_get_ith_jump_func (top, dst_fid);
2039 agg_p = dst->value.pass_through.agg_preserved;
2040
2041 dst->type = src->type;
2042 dst->value = src->value;
2043
2044 if (src->agg.items
2045 && (agg_p || !src->agg.by_ref))
2046 {
2047 /* Currently we do not produce clobber aggregate jump
2048 functions; replace this with merging when we do. */
2049 gcc_assert (!dst->agg.items);
2050
2051 dst->agg.by_ref = src->agg.by_ref;
2052 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
2053 src->agg.items);
2054 }
2055
2056 if (!agg_p)
2057 {
2058 if (dst->type == IPA_JF_PASS_THROUGH)
2059 dst->value.pass_through.agg_preserved = false;
2060 else if (dst->type == IPA_JF_ANCESTOR)
2061 dst->value.ancestor.agg_preserved = false;
2062 }
2063 }
2064 else
2065 dst->type = IPA_JF_UNKNOWN;
2066 }
2067 }
2068 }
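/* An illustrative composition performed by the loop above: suppose an
   inlined function passes its formal 0 unchanged (a NOP_EXPR pass-through)
   to some callee, and the inlined edge CS supplies a constant for argument
   0.  The callee's pass-through jump function is then overwritten with the
   constant jump function from TOP, while agg_preserved flags that the
   pass-through did not guarantee are dropped.  */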
2069
2070 /* If TARGET is an addr_expr of a function declaration, make it the destination
2071 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2072
2073 struct cgraph_edge *
2074 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2075 {
2076 struct cgraph_node *callee;
2077
2078 if (TREE_CODE (target) == ADDR_EXPR)
2079 target = TREE_OPERAND (target, 0);
2080 if (TREE_CODE (target) != FUNCTION_DECL)
2081 return NULL;
2082 callee = cgraph_get_node (target);
2083 if (!callee)
2084 return NULL;
2085 ipa_check_create_node_params ();
2086
2087 /* We cannot make edges to inline clones. It is a bug that someone removed
2088 the cgraph node too early. */
2089 gcc_assert (!callee->global.inlined_to);
2090
2091 cgraph_make_edge_direct (ie, callee);
2092 if (dump_file)
2093 {
2094 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2095 "(%s/%i -> %s/%i), for stmt ",
2096 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2097 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2098 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2099 if (ie->call_stmt)
2100 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2101 else
2102 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2103 }
2104 callee = cgraph_function_or_thunk_node (callee, NULL);
2105
2106 return ie;
2107 }
2108
2109 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2110 return NULL if there is none. BY_REF specifies whether the value has to
2111 be passed by reference or by value. */
2112
2113 tree
2114 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2115 HOST_WIDE_INT offset, bool by_ref)
2116 {
2117 struct ipa_agg_jf_item *item;
2118 int i;
2119
2120 if (by_ref != agg->by_ref)
2121 return NULL;
2122
2123 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2124 {
2125 if (item->offset == offset)
2126 {
2127 /* Currently we do not have clobber values; return NULL for them once
2128 we do. */
2129 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2130 return item->value;
2131 }
2132 else if (item->offset > offset)
2133 return NULL;
2134 }
2135 return NULL;
2136 }
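/* Note that the early NULL return above relies on AGG->items being sorted
   by increasing offset, which the analysis that builds the vector is
   expected to maintain.  E.g. with items at offsets 0, 32 and 96, a query
   for offset 64 stops as soon as it sees the item at 96.  */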
2137
2138 /* Try to find a destination for indirect edge IE that corresponds to a simple
2139 call or a call of a member function pointer and where the destination is a
2140 pointer formal parameter described by jump function JFUNC. If it can be
2141 determined, return the newly direct edge, otherwise return NULL. */
2142
2143 static struct cgraph_edge *
2144 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2145 struct ipa_jump_func *jfunc)
2146 {
2147 tree target;
2148
2149 if (ie->indirect_info->agg_contents)
2150 {
2151 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2152 ie->indirect_info->offset,
2153 ie->indirect_info->by_ref);
2154 if (!target)
2155 return NULL;
2156 }
2157 else
2158 {
2159 if (jfunc->type != IPA_JF_CONST)
2160 return NULL;
2161 target = ipa_get_jf_constant (jfunc);
2162 }
2163 return ipa_make_edge_direct_to_target (ie, target);
2164 }
2165
2166 /* Try to find a destination for indirect edge IE that corresponds to a
2167 virtual call based on a formal parameter which is described by jump
2168 function JFUNC and if it can be determined, make it direct and return the
2169 direct edge. Otherwise, return NULL. */
2170
2171 static struct cgraph_edge *
2172 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2173 struct ipa_jump_func *jfunc)
2174 {
2175 tree binfo, target;
2176
2177 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2178 return NULL;
2179
2180 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2181 gcc_checking_assert (binfo);
2182 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2183 + ie->indirect_info->offset,
2184 ie->indirect_info->otr_type);
2185 if (binfo)
2186 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2187 binfo);
2188 else
2189 return NULL;
2190
2191 if (target)
2192 return ipa_make_edge_direct_to_target (ie, target);
2193 else
2194 return NULL;
2195 }
2196
2197 /* Update the param called notes associated with NODE when CS is being inlined,
2198 assuming NODE is (potentially indirectly) inlined into CS->callee.
2199 Moreover, if the callee is discovered to be constant, create a new cgraph
2200 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2201 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2202
2203 static bool
2204 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2205 struct cgraph_node *node,
2206 VEC (cgraph_edge_p, heap) **new_edges)
2207 {
2208 struct ipa_edge_args *top;
2209 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2210 bool res = false;
2211
2212 ipa_check_create_edge_args ();
2213 top = IPA_EDGE_REF (cs);
2214
2215 for (ie = node->indirect_calls; ie; ie = next_ie)
2216 {
2217 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2218 struct ipa_jump_func *jfunc;
2219 int param_index;
2220
2221 next_ie = ie->next_callee;
2222
2223 if (ici->param_index == -1)
2224 continue;
2225
2226 /* We must check range due to calls with variable number of arguments: */
2227 if (ici->param_index >= ipa_get_cs_argument_count (top))
2228 {
2229 ici->param_index = -1;
2230 continue;
2231 }
2232
2233 param_index = ici->param_index;
2234 jfunc = ipa_get_ith_jump_func (top, param_index);
2235 if (jfunc->type == IPA_JF_PASS_THROUGH
2236 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2237 {
2238 if (ici->agg_contents
2239 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2240 ici->param_index = -1;
2241 else
2242 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2243 }
2244 else if (jfunc->type == IPA_JF_ANCESTOR)
2245 {
2246 if (ici->agg_contents
2247 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2248 ici->param_index = -1;
2249 else
2250 {
2251 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2252 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2253 }
2254 }
2255 else
2256 /* Either we can find a destination for this edge now or never. */
2257 ici->param_index = -1;
2258
2259 if (!flag_indirect_inlining)
2260 continue;
2261
2262 if (ici->polymorphic)
2263 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2264 else
2265 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2266
2267 if (new_direct_edge)
2268 {
2269 new_direct_edge->indirect_inlining_edge = 1;
2270 if (new_direct_edge->call_stmt)
2271 new_direct_edge->call_stmt_cannot_inline_p
2272 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2273 new_direct_edge->callee->symbol.decl);
2274 if (new_edges)
2275 {
2276 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
2277 new_direct_edge);
2278 top = IPA_EDGE_REF (cs);
2279 res = true;
2280 }
2281 }
2282 }
2283
2284 return res;
2285 }
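/* An illustrative walk through the loop above: if NODE has an indirect
   call through its formal 1 and the jump function of argument 1 on TOP is
   a NOP_EXPR pass-through of the caller's formal 3, the note survives with
   param_index updated to 3.  If the jump function is instead an
   IPA_JF_CONST, the call to try_make_edge_direct_simple_call can resolve
   the indirect edge to a direct one.  */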
2286
2287 /* Recursively traverse subtree of NODE (including node) made of inlined
2288 cgraph_edges when CS has been inlined and invoke
2289 update_indirect_edges_after_inlining on all nodes and
2290 update_jump_functions_after_inlining on all non-inlined edges that lead out
2291 of this subtree. Newly discovered indirect edges will be added to
2292 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2293 created. */
2294
2295 static bool
2296 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2297 struct cgraph_node *node,
2298 VEC (cgraph_edge_p, heap) **new_edges)
2299 {
2300 struct cgraph_edge *e;
2301 bool res;
2302
2303 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2304
2305 for (e = node->callees; e; e = e->next_callee)
2306 if (!e->inline_failed)
2307 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2308 else
2309 update_jump_functions_after_inlining (cs, e);
2310 for (e = node->indirect_calls; e; e = e->next_callee)
2311 update_jump_functions_after_inlining (cs, e);
2312
2313 return res;
2314 }
2315
2316 /* Update jump functions and call note functions on inlining the call site CS.
2317 CS is expected to lead to a node already cloned by
2318 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2319 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2320 created. */
2321
2322 bool
2323 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2324 VEC (cgraph_edge_p, heap) **new_edges)
2325 {
2326 bool changed;
2327 /* Do nothing if the preparation phase has not been carried out yet
2328 (i.e. during early inlining). */
2329 if (!ipa_node_params_vector)
2330 return false;
2331 gcc_assert (ipa_edge_args_vector);
2332
2333 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2334
2335 /* We do not keep jump functions of inlined edges up to date. Better to free
2336 them so we do not access them accidentally. */
2337 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2338 return changed;
2339 }
2340
2341 /* Frees all dynamically allocated structures that the argument info points
2342 to. */
2343
2344 void
2345 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2346 {
2347 if (args->jump_functions)
2348 ggc_free (args->jump_functions);
2349
2350 memset (args, 0, sizeof (*args));
2351 }
2352
2353 /* Free all ipa_edge structures. */
2354
2355 void
2356 ipa_free_all_edge_args (void)
2357 {
2358 int i;
2359 struct ipa_edge_args *args;
2360
2361 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2362 ipa_free_edge_args_substructures (args);
2363
2364 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2365 ipa_edge_args_vector = NULL;
2366 }
2367
2368 /* Frees all dynamically allocated structures that the param info points
2369 to. */
2370
2371 void
2372 ipa_free_node_params_substructures (struct ipa_node_params *info)
2373 {
2374 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2375 free (info->lattices);
2376 /* Lattice values and their sources are deallocated with their allocation
2377 pool. */
2378 VEC_free (tree, heap, info->known_vals);
2379 memset (info, 0, sizeof (*info));
2380 }
2381
2382 /* Free all ipa_node_params structures. */
2383
2384 void
2385 ipa_free_all_node_params (void)
2386 {
2387 int i;
2388 struct ipa_node_params *info;
2389
2390 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2391 ipa_free_node_params_substructures (info);
2392
2393 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2394 ipa_node_params_vector = NULL;
2395 }
2396
2397 /* Hook that is called by cgraph.c when an edge is removed. */
2398
2399 static void
2400 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2401 {
2402 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2403 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2404 <= (unsigned)cs->uid)
2405 return;
2406 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2407 }
2408
2409 /* Hook that is called by cgraph.c when a node is removed. */
2410
2411 static void
2412 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2413 {
2414 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2415 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2416 <= (unsigned)node->uid)
2417 return;
2418 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2419 }
2420
2421 /* Hook that is called by cgraph.c when an edge is duplicated. */
2422
2423 static void
2424 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2425 __attribute__((unused)) void *data)
2426 {
2427 struct ipa_edge_args *old_args, *new_args;
2428 unsigned int i;
2429
2430 ipa_check_create_edge_args ();
2431
2432 old_args = IPA_EDGE_REF (src);
2433 new_args = IPA_EDGE_REF (dst);
2434
2435 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2436 old_args->jump_functions);
2437
2438 for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
2439 VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
2440 = VEC_copy (ipa_agg_jf_item_t, gc,
2441 VEC_index (ipa_jump_func_t,
2442 old_args->jump_functions, i).agg.items);
2443 }
2444
2445 /* Hook that is called by cgraph.c when a node is duplicated. */
2446
2447 static void
2448 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2449 ATTRIBUTE_UNUSED void *data)
2450 {
2451 struct ipa_node_params *old_info, *new_info;
2452
2453 ipa_check_create_node_params ();
2454 old_info = IPA_NODE_REF (src);
2455 new_info = IPA_NODE_REF (dst);
2456
2457 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2458 old_info->descriptors);
2459 new_info->lattices = NULL;
2460 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2461
2462 new_info->uses_analysis_done = old_info->uses_analysis_done;
2463 new_info->node_enqueued = old_info->node_enqueued;
2464 }
2465
2466
2467 /* Analyze newly added function into callgraph. */
2468
2469 static void
2470 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2471 {
2472 ipa_analyze_node (node);
2473 }
2474
2475 /* Register our cgraph hooks if they are not already there. */
2476
2477 void
2478 ipa_register_cgraph_hooks (void)
2479 {
2480 if (!edge_removal_hook_holder)
2481 edge_removal_hook_holder =
2482 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2483 if (!node_removal_hook_holder)
2484 node_removal_hook_holder =
2485 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2486 if (!edge_duplication_hook_holder)
2487 edge_duplication_hook_holder =
2488 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2489 if (!node_duplication_hook_holder)
2490 node_duplication_hook_holder =
2491 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2492 function_insertion_hook_holder =
2493 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2494 }
2495
2496 /* Unregister our cgraph hooks. */
2497
2498 static void
2499 ipa_unregister_cgraph_hooks (void)
2500 {
2501 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2502 edge_removal_hook_holder = NULL;
2503 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2504 node_removal_hook_holder = NULL;
2505 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2506 edge_duplication_hook_holder = NULL;
2507 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2508 node_duplication_hook_holder = NULL;
2509 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2510 function_insertion_hook_holder = NULL;
2511 }
2512
2513 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2514 longer needed after ipa-cp. */
2515
2516 void
2517 ipa_free_all_structures_after_ipa_cp (void)
2518 {
2519 if (!optimize)
2520 {
2521 ipa_free_all_edge_args ();
2522 ipa_free_all_node_params ();
2523 free_alloc_pool (ipcp_sources_pool);
2524 free_alloc_pool (ipcp_values_pool);
2525 ipa_unregister_cgraph_hooks ();
2526 }
2527 }
2528
2529 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2530 longer needed after indirect inlining. */
2531
2532 void
2533 ipa_free_all_structures_after_iinln (void)
2534 {
2535 ipa_free_all_edge_args ();
2536 ipa_free_all_node_params ();
2537 ipa_unregister_cgraph_hooks ();
2538 if (ipcp_sources_pool)
2539 free_alloc_pool (ipcp_sources_pool);
2540 if (ipcp_values_pool)
2541 free_alloc_pool (ipcp_values_pool);
2542 }
2543
2544 /* Print the parameter descriptors (ipa_tree_map data) of function NODE
2545 to F. */
2546
2547 void
2548 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2549 {
2550 int i, count;
2551 tree temp;
2552 struct ipa_node_params *info;
2553
2554 if (!node->analyzed)
2555 return;
2556 info = IPA_NODE_REF (node);
2557 fprintf (f, " function %s parameter descriptors:\n",
2558 cgraph_node_name (node));
2559 count = ipa_get_param_count (info);
2560 for (i = 0; i < count; i++)
2561 {
2562 temp = ipa_get_param (info, i);
2563 if (TREE_CODE (temp) == PARM_DECL)
2564 fprintf (f, " param %d : %s", i,
2565 (DECL_NAME (temp)
2566 ? (*lang_hooks.decl_printable_name) (temp, 2)
2567 : "(unnamed)"));
2568 if (ipa_is_param_used (info, i))
2569 fprintf (f, " used");
2570 fprintf (f, "\n");
2571 }
2572 }
2573
2574 /* Print ipa_tree_map data structures of all functions in the
2575 callgraph to F. */
2576
2577 void
2578 ipa_print_all_params (FILE * f)
2579 {
2580 struct cgraph_node *node;
2581
2582 fprintf (f, "\nFunction parameters:\n");
2583 FOR_EACH_FUNCTION (node)
2584 ipa_print_node_params (f, node);
2585 }
2586
2587 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2588
2589 VEC(tree, heap) *
2590 ipa_get_vector_of_formal_parms (tree fndecl)
2591 {
2592 VEC(tree, heap) *args;
2593 int count;
2594 tree parm;
2595
2596 count = count_formal_params (fndecl);
2597 args = VEC_alloc (tree, heap, count);
2598 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2599 VEC_quick_push (tree, args, parm);
2600
2601 return args;
2602 }
2603
2604 /* Return a heap allocated vector containing types of formal parameters of
2605 function type FNTYPE. */
2606
2607 static inline VEC(tree, heap) *
2608 get_vector_of_formal_parm_types (tree fntype)
2609 {
2610 VEC(tree, heap) *types;
2611 int count = 0;
2612 tree t;
2613
2614 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2615 count++;
2616
2617 types = VEC_alloc (tree, heap, count);
2618 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2619 VEC_quick_push (tree, types, TREE_VALUE (t));
2620
2621 return types;
2622 }
2623
2624 /* Modify the function declaration FNDECL and its type according to the plan in
2625 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2626 to reflect the actual parameters being modified which are determined by the
2627 base_index field. */
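/* An illustrative ADJUSTMENTS vector (the function and field values are
   made up): to turn

     void f (int a, struct S *p);

   into a clone that keeps A but passes only the scalar P->X by value, one
   would use two elements: element 0 with base_index 0 and copy_param set,
   and element 1 with base_index 1, copy_param and remove_param clear, the
   type of X and the offset of X within S.  */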
2628
2629 void
2630 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2631 const char *synth_parm_prefix)
2632 {
2633 VEC(tree, heap) *oparms, *otypes;
2634 tree orig_type, new_type = NULL;
2635 tree old_arg_types, t, new_arg_types = NULL;
2636 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2637 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2638 tree new_reversed = NULL;
2639 bool care_for_types, last_parm_void;
2640
2641 if (!synth_parm_prefix)
2642 synth_parm_prefix = "SYNTH";
2643
2644 oparms = ipa_get_vector_of_formal_parms (fndecl);
2645 orig_type = TREE_TYPE (fndecl);
2646 old_arg_types = TYPE_ARG_TYPES (orig_type);
2647
2648 /* The following test is an ugly hack; some functions simply don't have any
2649 arguments in their type. This is probably a bug but well... */
2650 care_for_types = (old_arg_types != NULL_TREE);
2651 if (care_for_types)
2652 {
2653 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2654 == void_type_node);
2655 otypes = get_vector_of_formal_parm_types (orig_type);
2656 if (last_parm_void)
2657 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2658 else
2659 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2660 }
2661 else
2662 {
2663 last_parm_void = false;
2664 otypes = NULL;
2665 }
2666
2667 for (i = 0; i < len; i++)
2668 {
2669 struct ipa_parm_adjustment *adj;
2670 gcc_assert (link);
2671
2672 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2673 parm = VEC_index (tree, oparms, adj->base_index);
2674 adj->base = parm;
2675
2676 if (adj->copy_param)
2677 {
2678 if (care_for_types)
2679 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2680 adj->base_index),
2681 new_arg_types);
2682 *link = parm;
2683 link = &DECL_CHAIN (parm);
2684 }
2685 else if (!adj->remove_param)
2686 {
2687 tree new_parm;
2688 tree ptype;
2689
2690 if (adj->by_ref)
2691 ptype = build_pointer_type (adj->type);
2692 else
2693 ptype = adj->type;
2694
2695 if (care_for_types)
2696 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2697
2698 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2699 ptype);
2700 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2701
2702 DECL_ARTIFICIAL (new_parm) = 1;
2703 DECL_ARG_TYPE (new_parm) = ptype;
2704 DECL_CONTEXT (new_parm) = fndecl;
2705 TREE_USED (new_parm) = 1;
2706 DECL_IGNORED_P (new_parm) = 1;
2707 layout_decl (new_parm, 0);
2708
2709 adj->base = parm;
2710 adj->reduction = new_parm;
2711
2712 *link = new_parm;
2713
2714 link = &DECL_CHAIN (new_parm);
2715 }
2716 }
2717
2718 *link = NULL_TREE;
2719
2720 if (care_for_types)
2721 {
2722 new_reversed = nreverse (new_arg_types);
2723 if (last_parm_void)
2724 {
2725 if (new_reversed)
2726 TREE_CHAIN (new_arg_types) = void_list_node;
2727 else
2728 new_reversed = void_list_node;
2729 }
2730 }
2731
2732 /* Use copy_node to preserve as much as possible from the original type
2733 (debug info, attribute lists etc.).  The exception is METHOD_TYPEs,
2734 which must have a THIS argument; when we are asked to remove it, we
2735 need to build a new FUNCTION_TYPE instead. */
2737 if (TREE_CODE (orig_type) != METHOD_TYPE
2738 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
2739 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
2740 {
2741 new_type = build_distinct_type_copy (orig_type);
2742 TYPE_ARG_TYPES (new_type) = new_reversed;
2743 }
2744 else
2745 {
2746 new_type
2747 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2748 new_reversed));
2749 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2750 DECL_VINDEX (fndecl) = NULL_TREE;
2751 }
2752
2753 /* When signature changes, we need to clear builtin info. */
2754 if (DECL_BUILT_IN (fndecl))
2755 {
2756 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2757 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2758 }
2759
2760 /* This is a new type, not a copy of an old type. Need to reassociate
2761 variants. We can handle everything except the main variant lazily. */
2762 t = TYPE_MAIN_VARIANT (orig_type);
2763 if (orig_type != t)
2764 {
2765 TYPE_MAIN_VARIANT (new_type) = t;
2766 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2767 TYPE_NEXT_VARIANT (t) = new_type;
2768 }
2769 else
2770 {
2771 TYPE_MAIN_VARIANT (new_type) = new_type;
2772 TYPE_NEXT_VARIANT (new_type) = NULL;
2773 }
2774
2775 TREE_TYPE (fndecl) = new_type;
2776 DECL_VIRTUAL_P (fndecl) = 0;
2777 if (otypes)
2778 VEC_free (tree, heap, otypes);
2779 VEC_free (tree, heap, oparms);
2780 }
2781
2782 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2783 If this is a directly recursive call, CS must be NULL. Otherwise it must
2784 contain the corresponding call graph edge. */
2785
2786 void
2787 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2788 ipa_parm_adjustment_vec adjustments)
2789 {
2790 VEC(tree, heap) *vargs;
2791 VEC(tree, gc) **debug_args = NULL;
2792 gimple new_stmt;
2793 gimple_stmt_iterator gsi;
2794 tree callee_decl;
2795 int i, len;
2796
2797 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2798 vargs = VEC_alloc (tree, heap, len);
2799 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2800
2801 gsi = gsi_for_stmt (stmt);
2802 for (i = 0; i < len; i++)
2803 {
2804 struct ipa_parm_adjustment *adj;
2805
2806 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2807
2808 if (adj->copy_param)
2809 {
2810 tree arg = gimple_call_arg (stmt, adj->base_index);
2811
2812 VEC_quick_push (tree, vargs, arg);
2813 }
2814 else if (!adj->remove_param)
2815 {
2816 tree expr, base, off;
2817 location_t loc;
2818
2819 /* We create a new parameter out of the value of the old one; we can
2820 do the following kinds of transformations:
2821
2822 - A scalar passed by reference is converted to a scalar passed by
2823 value. (adj->by_ref is false and the type of the original
2824 actual argument is a pointer to a scalar).
2825
2826 - A part of an aggregate is passed instead of the whole aggregate.
2827 The part can be passed either by value or by reference, this is
2828 determined by value of adj->by_ref. Moreover, the code below
2829 handles both situations when the original aggregate is passed by
2830 value (its type is not a pointer) and when it is passed by
2831 reference (it is a pointer to an aggregate).
2832
2833 When the new argument is passed by reference (adj->by_ref is true)
2834 it must be a part of an aggregate and therefore we form it by
2835 simply taking the address of a reference inside the original
2836 aggregate. */
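/* To make the first transformation above concrete: for a call f (&x)
   where the adjustment turns the pointer argument into the pointed-to
   scalar (adj->by_ref false, offset 0), the code below builds
   MEM_REF (&x, 0), which folds back to x, and passes it by value.  */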
2837
2838 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2839 base = gimple_call_arg (stmt, adj->base_index);
2840 loc = EXPR_LOCATION (base);
2841
2842 if (TREE_CODE (base) != ADDR_EXPR
2843 && POINTER_TYPE_P (TREE_TYPE (base)))
2844 off = build_int_cst (adj->alias_ptr_type,
2845 adj->offset / BITS_PER_UNIT);
2846 else
2847 {
2848 HOST_WIDE_INT base_offset;
2849 tree prev_base;
2850
2851 if (TREE_CODE (base) == ADDR_EXPR)
2852 base = TREE_OPERAND (base, 0);
2853 prev_base = base;
2854 base = get_addr_base_and_unit_offset (base, &base_offset);
2855 /* Aggregate arguments can have non-invariant addresses. */
2856 if (!base)
2857 {
2858 base = build_fold_addr_expr (prev_base);
2859 off = build_int_cst (adj->alias_ptr_type,
2860 adj->offset / BITS_PER_UNIT);
2861 }
2862 else if (TREE_CODE (base) == MEM_REF)
2863 {
2864 off = build_int_cst (adj->alias_ptr_type,
2865 base_offset
2866 + adj->offset / BITS_PER_UNIT);
2867 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2868 off);
2869 base = TREE_OPERAND (base, 0);
2870 }
2871 else
2872 {
2873 off = build_int_cst (adj->alias_ptr_type,
2874 base_offset
2875 + adj->offset / BITS_PER_UNIT);
2876 base = build_fold_addr_expr (base);
2877 }
2878 }
2879
2880 if (!adj->by_ref)
2881 {
2882 tree type = adj->type;
2883 unsigned int align;
2884 unsigned HOST_WIDE_INT misalign;
2885
2886 get_pointer_alignment_1 (base, &align, &misalign);
2887 misalign += (tree_to_double_int (off)
2888 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2889 * BITS_PER_UNIT);
2890 misalign = misalign & (align - 1);
2891 if (misalign != 0)
2892 align = (misalign & -misalign);
2893 if (align < TYPE_ALIGN (type))
2894 type = build_aligned_type (type, align);
2895 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2896 }
2897 else
2898 {
2899 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2900 expr = build_fold_addr_expr (expr);
2901 }
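/* The alignment computation in the by-value branch above works as
   follows (numbers are illustrative and in bits): with BASE known to be
   128-bit aligned but misaligned by 32 bits, and OFF adding 2 bytes
   (16 bits), the combined misalignment is 48; 48 & (128 - 1) = 48, and
   its lowest set bit, 48 & -48 = 16, becomes the alignment actually
   usable for the MEM_REF type when the type would otherwise demand
   more.  */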
2902
2903 expr = force_gimple_operand_gsi (&gsi, expr,
2904 adj->by_ref
2905 || is_gimple_reg_type (adj->type),
2906 NULL, true, GSI_SAME_STMT);
2907 VEC_quick_push (tree, vargs, expr);
2908 }
2909 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2910 {
2911 unsigned int ix;
2912 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2913 gimple def_temp;
2914
2915 arg = gimple_call_arg (stmt, adj->base_index);
2916 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2917 {
2918 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2919 continue;
2920 arg = fold_convert_loc (gimple_location (stmt),
2921 TREE_TYPE (origin), arg);
2922 }
2923 if (debug_args == NULL)
2924 debug_args = decl_debug_args_insert (callee_decl);
2925 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2926 if (ddecl == origin)
2927 {
2928 ddecl = VEC_index (tree, *debug_args, ix + 1);
2929 break;
2930 }
2931 if (ddecl == NULL)
2932 {
2933 ddecl = make_node (DEBUG_EXPR_DECL);
2934 DECL_ARTIFICIAL (ddecl) = 1;
2935 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2936 DECL_MODE (ddecl) = DECL_MODE (origin);
2937
2938 VEC_safe_push (tree, gc, *debug_args, origin);
2939 VEC_safe_push (tree, gc, *debug_args, ddecl);
2940 }
2941 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2942 stmt);
2943 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2944 }
2945 }
2946
2947 if (dump_file && (dump_flags & TDF_DETAILS))
2948 {
2949 fprintf (dump_file, "replacing stmt:");
2950 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2951 }
2952
2953 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2954 VEC_free (tree, heap, vargs);
2955 if (gimple_call_lhs (stmt))
2956 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2957
2958 gimple_set_block (new_stmt, gimple_block (stmt));
2959 if (gimple_has_location (stmt))
2960 gimple_set_location (new_stmt, gimple_location (stmt));
2961 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2962 gimple_call_copy_flags (new_stmt, stmt);
2963
2964 if (dump_file && (dump_flags & TDF_DETAILS))
2965 {
2966 fprintf (dump_file, "with stmt:");
2967 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2968 fprintf (dump_file, "\n");
2969 }
2970 gsi_replace (&gsi, new_stmt, true);
2971 if (cs)
2972 cgraph_set_call_stmt (cs, new_stmt);
2973 update_ssa (TODO_update_ssa);
2974 free_dominance_info (CDI_DOMINATORS);
2975 }
2976
2977 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2978
2979 static bool
2980 index_in_adjustments_multiple_times_p (int base_index,
2981 ipa_parm_adjustment_vec adjustments)
2982 {
2983 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2984 bool one = false;
2985
2986 for (i = 0; i < len; i++)
2987 {
2988 struct ipa_parm_adjustment *adj;
2989 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2990
2991 if (adj->base_index == base_index)
2992 {
2993 if (one)
2994 return true;
2995 else
2996 one = true;
2997 }
2998 }
2999 return false;
3000 }
3001
3002
3003 /* Return adjustments that should have the same effect on function parameters
3004 and call arguments as if they were first changed according to adjustments in
3005 INNER and then by adjustments in OUTER. */
3006
3007 ipa_parm_adjustment_vec
3008 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3009 ipa_parm_adjustment_vec outer)
3010 {
3011 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
3012 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
3013 int removals = 0;
3014 ipa_parm_adjustment_vec adjustments, tmp;
3015
3016 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
3017 for (i = 0; i < inlen; i++)
3018 {
3019 struct ipa_parm_adjustment *n;
3020 n = &VEC_index (ipa_parm_adjustment_t, inner, i);
3021
3022 if (n->remove_param)
3023 removals++;
3024 else
3025 VEC_quick_push (ipa_parm_adjustment_t, tmp, *n);
3026 }
3027
3028 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
3029 for (i = 0; i < outlen; i++)
3030 {
3031 struct ipa_parm_adjustment r;
3032 struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
3033 outer, i);
3034 struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
3035 out->base_index);
3036
3037 memset (&r, 0, sizeof (r));
3038 gcc_assert (!in->remove_param);
3039 if (out->remove_param)
3040 {
3041 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3042 {
3043 r.remove_param = true;
3044 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3045 }
3046 continue;
3047 }
3048
3049 r.base_index = in->base_index;
3050 r.type = out->type;
3051
3052 /* FIXME: Create nonlocal value too. */
3053
3054 if (in->copy_param && out->copy_param)
3055 r.copy_param = true;
3056 else if (in->copy_param)
3057 r.offset = out->offset;
3058 else if (out->copy_param)
3059 r.offset = in->offset;
3060 else
3061 r.offset = in->offset + out->offset;
3062 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3063 }
3064
3065 for (i = 0; i < inlen; i++)
3066 {
3067 struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
3068 inner, i);
3069
3070 if (n->remove_param)
3071 VEC_quick_push (ipa_parm_adjustment_t, adjustments, *n);
3072 }
3073
3074 VEC_free (ipa_parm_adjustment_t, heap, tmp);
3075 return adjustments;
3076 }
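/* A worked example of the composition above (parameter counts are
   illustrative): on a three-parameter function, INNER = { copy 0,
   remove 1, copy 2 } leaves two survivors, so TMP holds the two copies
   and OUTER indices are relative to them -- an OUTER element with
   base_index 1 refers to original parameter 2.  INNER's removals are
   then re-emitted at the end so the combined vector accounts for every
   original parameter.  */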
3077
3078 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3079 friendly way, assuming they are meant to be applied to FNDECL. */
3080
3081 void
3082 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3083 tree fndecl)
3084 {
3085 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3086 bool first = true;
3087 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
3088
3089 fprintf (file, "IPA param adjustments: ");
3090 for (i = 0; i < len; i++)
3091 {
3092 struct ipa_parm_adjustment *adj;
3093 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3094
3095 if (!first)
3096 fprintf (file, " ");
3097 else
3098 first = false;
3099
3100 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3101 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
3102 if (adj->base)
3103 {
3104 fprintf (file, ", base: ");
3105 print_generic_expr (file, adj->base, 0);
3106 }
3107 if (adj->reduction)
3108 {
3109 fprintf (file, ", reduction: ");
3110 print_generic_expr (file, adj->reduction, 0);
3111 }
3112 if (adj->new_ssa_base)
3113 {
3114 fprintf (file, ", new_ssa_base: ");
3115 print_generic_expr (file, adj->new_ssa_base, 0);
3116 }
3117
3118 if (adj->copy_param)
3119 fprintf (file, ", copy_param");
3120 else if (adj->remove_param)
3121 fprintf (file, ", remove_param");
3122 else
3123 fprintf (file, ", offset %li", (long) adj->offset);
3124 if (adj->by_ref)
3125 fprintf (file, ", by_ref");
3126 print_node_brief (file, ", type: ", adj->type, 0);
3127 fprintf (file, "\n");
3128 }
3129 VEC_free (tree, heap, parms);
3130 }
3131
3132 /* Stream out jump function JUMP_FUNC to OB. */
3133
3134 static void
3135 ipa_write_jump_function (struct output_block *ob,
3136 struct ipa_jump_func *jump_func)
3137 {
3138 struct ipa_agg_jf_item *item;
3139 struct bitpack_d bp;
3140 int i, count;
3141
3142 streamer_write_uhwi (ob, jump_func->type);
3143 switch (jump_func->type)
3144 {
3145 case IPA_JF_UNKNOWN:
3146 break;
3147 case IPA_JF_KNOWN_TYPE:
3148 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3149 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3150 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3151 break;
3152 case IPA_JF_CONST:
3153 stream_write_tree (ob, jump_func->value.constant, true);
3154 break;
3155 case IPA_JF_PASS_THROUGH:
3156 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3157 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3158 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3159 bp = bitpack_create (ob->main_stream);
3160 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3161 streamer_write_bitpack (&bp);
3162 break;
3163 case IPA_JF_ANCESTOR:
3164 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3165 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3166 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3167 bp = bitpack_create (ob->main_stream);
3168 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3169 streamer_write_bitpack (&bp);
3170 break;
3171 }
3172
3173 count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
3174 streamer_write_uhwi (ob, count);
3175 if (count)
3176 {
3177 bp = bitpack_create (ob->main_stream);
3178 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3179 streamer_write_bitpack (&bp);
3180 }
3181
3182 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
3183 {
3184 streamer_write_uhwi (ob, item->offset);
3185 stream_write_tree (ob, item->value, true);
3186 }
3187 }
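/* The record emitted above is thus: a uhwi type tag, a type-specific
   payload (trees and uhwis, plus a one-bit agg_preserved bitpack for
   pass-through and ancestor functions), a uhwi count of aggregate items,
   a one-bit by_ref bitpack if the count is nonzero, and offset/value
   pairs for the items.  ipa_read_jump_function below consumes exactly
   the same sequence.  */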
3188
3189 /* Read in jump function JUMP_FUNC from IB. */
3190
3191 static void
3192 ipa_read_jump_function (struct lto_input_block *ib,
3193 struct ipa_jump_func *jump_func,
3194 struct data_in *data_in)
3195 {
3196 struct bitpack_d bp;
3197 int i, count;
3198
3199 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3200 switch (jump_func->type)
3201 {
3202 case IPA_JF_UNKNOWN:
3203 break;
3204 case IPA_JF_KNOWN_TYPE:
3205 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3206 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3207 jump_func->value.known_type.component_type = stream_read_tree (ib,
3208 data_in);
3209 break;
3210 case IPA_JF_CONST:
3211 jump_func->value.constant = stream_read_tree (ib, data_in);
3212 break;
3213 case IPA_JF_PASS_THROUGH:
3214 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3215 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3216 jump_func->value.pass_through.operation
3217 = (enum tree_code) streamer_read_uhwi (ib);
3218 bp = streamer_read_bitpack (ib);
3219 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3220 break;
3221 case IPA_JF_ANCESTOR:
3222 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3223 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3224 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3225 bp = streamer_read_bitpack (ib);
3226 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3227 break;
3228 }
3229
3230 count = streamer_read_uhwi (ib);
3231 jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
3232 if (count)
3233 {
3234 bp = streamer_read_bitpack (ib);
3235 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3236 }
3237 for (i = 0; i < count; i++)
3238 {
3239 struct ipa_agg_jf_item item;
3240 item.offset = streamer_read_uhwi (ib);
3241 item.value = stream_read_tree (ib, data_in);
3242 VEC_quick_push (ipa_agg_jf_item_t, jump_func->agg.items, item);
3243 }
3244 }
3245
3246 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3247 relevant to indirect inlining to OB. */
3248
3249 static void
3250 ipa_write_indirect_edge_info (struct output_block *ob,
3251 struct cgraph_edge *cs)
3252 {
3253 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3254 struct bitpack_d bp;
3255
3256 streamer_write_hwi (ob, ii->param_index);
3257 streamer_write_hwi (ob, ii->offset);
3258 bp = bitpack_create (ob->main_stream);
3259 bp_pack_value (&bp, ii->polymorphic, 1);
3260 bp_pack_value (&bp, ii->agg_contents, 1);
3261 bp_pack_value (&bp, ii->by_ref, 1);
3262 streamer_write_bitpack (&bp);
3263
3264 if (ii->polymorphic)
3265 {
3266 streamer_write_hwi (ob, ii->otr_token);
3267 stream_write_tree (ob, ii->otr_type, true);
3268 }
3269 }
3270
3271 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3272 relevant to indirect inlining from IB. */
3273
3274 static void
3275 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3276 struct data_in *data_in ATTRIBUTE_UNUSED,
3277 struct cgraph_edge *cs)
3278 {
3279 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3280 struct bitpack_d bp;
3281
3282 ii->param_index = (int) streamer_read_hwi (ib);
3283 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3284 bp = streamer_read_bitpack (ib);
3285 ii->polymorphic = bp_unpack_value (&bp, 1);
3286 ii->agg_contents = bp_unpack_value (&bp, 1);
3287 ii->by_ref = bp_unpack_value (&bp, 1);
3288 if (ii->polymorphic)
3289 {
3290 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3291 ii->otr_type = stream_read_tree (ib, data_in);
3292 }
3293 }
3294
3295 /* Stream out NODE info to OB. */
3296
3297 static void
3298 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3299 {
3300 int node_ref;
3301 lto_symtab_encoder_t encoder;
3302 struct ipa_node_params *info = IPA_NODE_REF (node);
3303 int j;
3304 struct cgraph_edge *e;
3305 struct bitpack_d bp;
3306
3307 encoder = ob->decl_state->symtab_node_encoder;
3308 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3309 streamer_write_uhwi (ob, node_ref);
3310
3311 bp = bitpack_create (ob->main_stream);
3312 gcc_assert (info->uses_analysis_done
3313 || ipa_get_param_count (info) == 0);
3314 gcc_assert (!info->node_enqueued);
3315 gcc_assert (!info->ipcp_orig_node);
3316 for (j = 0; j < ipa_get_param_count (info); j++)
3317 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3318 streamer_write_bitpack (&bp);
3319 for (e = node->callees; e; e = e->next_callee)
3320 {
3321 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3322
3323 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3324 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3325 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3326 }
3327 for (e = node->indirect_calls; e; e = e->next_callee)
3328 {
3329 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3330
3331 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3332 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3333 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3334 ipa_write_indirect_edge_info (ob, e);
3335 }
3336 }
3337
3338 /* Stream in NODE info from IB. */
3339
3340 static void
3341 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3342 struct data_in *data_in)
3343 {
3344 struct ipa_node_params *info = IPA_NODE_REF (node);
3345 int k;
3346 struct cgraph_edge *e;
3347 struct bitpack_d bp;
3348
3349 ipa_initialize_node_params (node);
3350
3351 bp = streamer_read_bitpack (ib);
3352 if (ipa_get_param_count (info) != 0)
3353 info->uses_analysis_done = true;
3354 info->node_enqueued = false;
3355 for (k = 0; k < ipa_get_param_count (info); k++)
3356 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3357 for (e = node->callees; e; e = e->next_callee)
3358 {
3359 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3360 int count = streamer_read_uhwi (ib);
3361
3362 if (!count)
3363 continue;
3364 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
3365
3366 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3367 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3368 }
3369 for (e = node->indirect_calls; e; e = e->next_callee)
3370 {
3371 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3372 int count = streamer_read_uhwi (ib);
3373
3374 if (count)
3375 {
3376 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
3377 count);
3378 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3379 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3380 data_in);
3381 }
3382 ipa_read_indirect_edge_info (ib, data_in, e);
3383 }
3384 }
3385
3386 /* Write jump functions for the nodes in the current partition. */
3387
3388 void
3389 ipa_prop_write_jump_functions (void)
3390 {
3391 struct cgraph_node *node;
3392 struct output_block *ob;
3393 unsigned int count = 0;
3394 lto_symtab_encoder_iterator lsei;
3395 lto_symtab_encoder_t encoder;
3396
3397
3398 if (!ipa_node_params_vector)
3399 return;
3400
3401 ob = create_output_block (LTO_section_jump_functions);
3402 encoder = ob->decl_state->symtab_node_encoder;
3403 ob->cgraph_node = NULL;
3404 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3405 lsei_next_function_in_partition (&lsei))
3406 {
3407 node = lsei_cgraph_node (lsei);
3408 if (cgraph_function_with_gimple_body_p (node)
3409 && IPA_NODE_REF (node) != NULL)
3410 count++;
3411 }
3412
3413 streamer_write_uhwi (ob, count);
3414
3415 /* Process all of the functions. */
3416 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3417 lsei_next_function_in_partition (&lsei))
3418 {
3419 node = lsei_cgraph_node (lsei);
3420 if (cgraph_function_with_gimple_body_p (node)
3421 && IPA_NODE_REF (node) != NULL)
3422 ipa_write_node_info (ob, node);
3423 }
3424 streamer_write_char_stream (ob->main_stream, 0);
3425 produce_asm (ob, NULL);
3426 destroy_output_block (ob);
3427 }
3428
3429 /* Read section in file FILE_DATA of length LEN with data DATA. */
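/* The layout assumed below matches what the writer produced: an
   lto_function_header, then header->cfg_size bytes (unused by this
   reader), then header->main_size bytes of main stream containing a
   uhwi node count followed by the per-node records, and finally
   header->string_size bytes of string table.  */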
3430
3431 static void
3432 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3433 size_t len)
3434 {
3435 const struct lto_function_header *header =
3436 (const struct lto_function_header *) data;
3437 const int cfg_offset = sizeof (struct lto_function_header);
3438 const int main_offset = cfg_offset + header->cfg_size;
3439 const int string_offset = main_offset + header->main_size;
3440 struct data_in *data_in;
3441 struct lto_input_block ib_main;
3442 unsigned int i;
3443 unsigned int count;
3444
3445 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3446 header->main_size);
3447
3448 data_in =
3449 lto_data_in_create (file_data, (const char *) data + string_offset,
3450 header->string_size, NULL);
3451 count = streamer_read_uhwi (&ib_main);
3452
3453 for (i = 0; i < count; i++)
3454 {
3455 unsigned int index;
3456 struct cgraph_node *node;
3457 lto_symtab_encoder_t encoder;
3458
3459 index = streamer_read_uhwi (&ib_main);
3460 encoder = file_data->symtab_node_encoder;
3461 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3462 gcc_assert (node->analyzed);
3463 ipa_read_node_info (&ib_main, node, data_in);
3464 }
3465 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3466 len);
3467 lto_data_in_delete (data_in);
3468 }
3469
3470 /* Read ipcp jump functions. */
3471
3472 void
3473 ipa_prop_read_jump_functions (void)
3474 {
3475 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3476 struct lto_file_decl_data *file_data;
3477 unsigned int j = 0;
3478
3479 ipa_check_create_node_params ();
3480 ipa_check_create_edge_args ();
3481 ipa_register_cgraph_hooks ();
3482
3483 while ((file_data = file_data_vec[j++]))
3484 {
3485 size_t len;
3486 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3487
3488 if (data)
3489 ipa_prop_read_section (file_data, data, len);
3490 }
3491 }
3492
3493 /* After merging units, we can get mismatches in argument counts.
3494 Also, decl merging might have rendered parameter lists obsolete.
3495 Also compute called_with_variable_arg info. */
3496
3497 void
3498 ipa_update_after_lto_read (void)
3499 {
3500 struct cgraph_node *node;
3501
3502 ipa_check_create_node_params ();
3503 ipa_check_create_edge_args ();
3504
3505 FOR_EACH_DEFINED_FUNCTION (node)
3506 if (node->analyzed)
3507 ipa_initialize_node_params (node);
3508 }