1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "ipa-inline.h"
34 #include "gimple.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "lto-streamer.h"
39 #include "data-streamer.h"
40 #include "tree-streamer.h"
41 #include "params.h"
42
43
44 /* Intermediate information about a parameter that is only useful during the
45 run of ipa_analyze_node and is not kept afterwards. */
46
47 struct param_analysis_info
48 {
49 bool parm_modified, ref_modified, pt_modified;
50 bitmap parm_visited_statements, pt_visited_statements;
51 };
52
53 /* Vector where the parameter infos are actually stored. */
54 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
55 /* Vector where the edge argument infos are actually stored. */
56 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57
58 /* Holders of ipa cgraph hooks: */
59 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
60 static struct cgraph_node_hook_list *node_removal_hook_holder;
61 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
62 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
63 static struct cgraph_node_hook_list *function_insertion_hook_holder;
64
65 /* Return index of the formal whose tree is PTREE in the vector of parameter
66 DESCRIPTORS, or -1 if it is not found there. */
67
68 static int
69 ipa_get_param_decl_index_1 (VEC (ipa_param_descriptor_t, heap) *descriptors,
70 tree ptree)
71 {
72 int i, count;
73
74 count = VEC_length (ipa_param_descriptor_t, descriptors);
75 for (i = 0; i < count; i++)
76 if (VEC_index (ipa_param_descriptor_t, descriptors, i).decl == ptree)
77 return i;
78
79 return -1;
80 }
81
82 /* Return index of the formal whose tree is PTREE in the function which
83 corresponds to INFO. */
84
85 int
86 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
87 {
88 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
89 }
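
/* For instance (as a sketch), given a function declared as

     int foo (int a, struct S *b);

   the index returned for the PARM_DECL of "a" is 0 and for that of "b" it
   is 1, following the order of DECL_ARGUMENTS; any other tree yields -1.  */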
90
91 /* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
92 NODE. */
93
94 static void
95 ipa_populate_param_decls (struct cgraph_node *node,
96 VEC (ipa_param_descriptor_t, heap) *descriptors)
97 {
98 tree fndecl;
99 tree fnargs;
100 tree parm;
101 int param_num;
102
103 fndecl = node->symbol.decl;
104 fnargs = DECL_ARGUMENTS (fndecl);
105 param_num = 0;
106 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
107 {
108 VEC_index (ipa_param_descriptor_t, descriptors, param_num).decl = parm;
109 param_num++;
110 }
111 }
112
113 /* Return how many formal parameters FNDECL has. */
114
115 static inline int
116 count_formal_params (tree fndecl)
117 {
118 tree parm;
119 int count = 0;
120
121 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
122 count++;
123
124 return count;
125 }
126
127 /* Initialize the ipa_node_params structure associated with NODE by counting
128 the function parameters, creating the descriptors and populating their
129 param_decls. */
130
131 void
132 ipa_initialize_node_params (struct cgraph_node *node)
133 {
134 struct ipa_node_params *info = IPA_NODE_REF (node);
135
136 if (!info->descriptors)
137 {
138 int param_count;
139
140 param_count = count_formal_params (node->symbol.decl);
141 if (param_count)
142 {
143 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
144 info->descriptors, param_count);
145 ipa_populate_param_decls (node, info->descriptors);
146 }
147 }
148 }
149
150 /* Print the jump functions associated with call graph edge CS to file F. */
151
152 static void
153 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
154 {
155 int i, count;
156
157 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
158 for (i = 0; i < count; i++)
159 {
160 struct ipa_jump_func *jump_func;
161 enum jump_func_type type;
162
163 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
164 type = jump_func->type;
165
166 fprintf (f, " param %d: ", i);
167 if (type == IPA_JF_UNKNOWN)
168 fprintf (f, "UNKNOWN\n");
169 else if (type == IPA_JF_KNOWN_TYPE)
170 {
171 fprintf (f, "KNOWN TYPE: base ");
172 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
173 fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
174 jump_func->value.known_type.offset);
175 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
176 fprintf (f, "\n");
177 }
178 else if (type == IPA_JF_CONST)
179 {
180 tree val = jump_func->value.constant;
181 fprintf (f, "CONST: ");
182 print_generic_expr (f, val, 0);
183 if (TREE_CODE (val) == ADDR_EXPR
184 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
185 {
186 fprintf (f, " -> ");
187 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
188 0);
189 }
190 fprintf (f, "\n");
191 }
192 else if (type == IPA_JF_PASS_THROUGH)
193 {
194 fprintf (f, "PASS THROUGH: ");
195 fprintf (f, "%d, op %s",
196 jump_func->value.pass_through.formal_id,
197 tree_code_name[(int)
198 jump_func->value.pass_through.operation]);
199 if (jump_func->value.pass_through.operation != NOP_EXPR)
200 {
201 fprintf (f, " ");
202 print_generic_expr (f,
203 jump_func->value.pass_through.operand, 0);
204 }
205 if (jump_func->value.pass_through.agg_preserved)
206 fprintf (f, ", agg_preserved");
207 fprintf (f, "\n");
208 }
209 else if (type == IPA_JF_ANCESTOR)
210 {
211 fprintf (f, "ANCESTOR: ");
212 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
213 jump_func->value.ancestor.formal_id,
214 jump_func->value.ancestor.offset);
215 print_generic_expr (f, jump_func->value.ancestor.type, 0);
216 if (jump_func->value.ancestor.agg_preserved)
217 fprintf (f, ", agg_preserved");
218 fprintf (f, "\n");
219 }
220
221 if (jump_func->agg.items)
222 {
223 struct ipa_agg_jf_item *item;
224 int j;
225
226 fprintf (f, " Aggregate passed by %s:\n",
227 jump_func->agg.by_ref ? "reference" : "value");
228 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
229 j, item)
230 {
231 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
232 item->offset);
233 if (TYPE_P (item->value))
234 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
235 tree_low_cst (TYPE_SIZE (item->value), 1));
236 else
237 {
238 fprintf (f, "cst: ");
239 print_generic_expr (f, item->value, 0);
240 }
241 fprintf (f, "\n");
242 }
243 }
244 }
245 }
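
/* With the format above, the dump of a call site with two arguments may, for
   instance, look like:

     param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
     param 1: CONST: 42

   with one "param" line (plus optional aggregate lines) per argument.  */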
246
247
248 /* Print to file F the jump functions of all arguments on all call graph
249 edges going from NODE. */
250
251 void
252 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
253 {
254 struct cgraph_edge *cs;
255 int i;
256
257 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
258 for (cs = node->callees; cs; cs = cs->next_callee)
259 {
260 if (!ipa_edge_args_info_available_for_edge_p (cs))
261 continue;
262
263 fprintf (f, " callsite %s/%i -> %s/%i : \n",
264 xstrdup (cgraph_node_name (node)), node->uid,
265 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
266 ipa_print_node_jump_functions_for_edge (f, cs);
267 }
268
269 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
270 {
271 if (!ipa_edge_args_info_available_for_edge_p (cs))
272 continue;
273
274 if (cs->call_stmt)
275 {
276 fprintf (f, " indirect callsite %d for stmt ", i);
277 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
278 }
279 else
280 fprintf (f, " indirect callsite %d :\n", i);
281 ipa_print_node_jump_functions_for_edge (f, cs);
282
283 }
284 }
285
286 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
287
288 void
289 ipa_print_all_jump_functions (FILE *f)
290 {
291 struct cgraph_node *node;
292
293 fprintf (f, "\nJump functions:\n");
294 FOR_EACH_FUNCTION (node)
295 {
296 ipa_print_node_jump_functions (f, node);
297 }
298 }
299
300 /* Worker for prune_expression_for_jf. */
301
302 static tree
303 prune_expression_for_jf_1 (tree *tp, int *walk_subtrees, void *)
304 {
305 if (EXPR_P (*tp))
306 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
307 else
308 *walk_subtrees = 0;
309 return NULL_TREE;
310 }
311
312 /* Return the expression tree EXPR unshared and with location stripped off. */
313
314 static tree
315 prune_expression_for_jf (tree exp)
316 {
317 if (EXPR_P (exp))
318 {
319 exp = unshare_expr (exp);
320 walk_tree (&exp, prune_expression_for_jf_1, NULL, NULL);
321 }
322 return exp;
323 }
324
325 /* Set JFUNC to be a known type jump function. */
326
327 static void
328 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
329 tree base_type, tree component_type)
330 {
331 jfunc->type = IPA_JF_KNOWN_TYPE;
332 jfunc->value.known_type.offset = offset;
333 jfunc->value.known_type.base_type = base_type;
334 jfunc->value.known_type.component_type = component_type;
335 }
336
337 /* Set JFUNC to be a constant jump function. */
338
339 static void
340 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
341 {
342 /* Note that prune_expression_for_jf both unshares CONSTANT and strips
343 its location, so no separate unsharing or location stripping is
344 needed here. */
345 jfunc->type = IPA_JF_CONST;
346 jfunc->value.constant = prune_expression_for_jf (constant);
347 }
348
349 /* Set JFUNC to be a simple pass-through jump function. */
350 static void
351 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
352 bool agg_preserved)
353 {
354 jfunc->type = IPA_JF_PASS_THROUGH;
355 jfunc->value.pass_through.operand = NULL_TREE;
356 jfunc->value.pass_through.formal_id = formal_id;
357 jfunc->value.pass_through.operation = NOP_EXPR;
358 jfunc->value.pass_through.agg_preserved = agg_preserved;
359 }
360
361 /* Set JFUNC to be an arithmetic pass through jump function. */
362
363 static void
364 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
365 tree operand, enum tree_code operation)
366 {
367 jfunc->type = IPA_JF_PASS_THROUGH;
368 jfunc->value.pass_through.operand = prune_expression_for_jf (operand);
369 jfunc->value.pass_through.formal_id = formal_id;
370 jfunc->value.pass_through.operation = operation;
371 jfunc->value.pass_through.agg_preserved = false;
372 }
373
374 /* Set JFUNC to be an ancestor jump function. */
375
376 static void
377 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
378 tree type, int formal_id, bool agg_preserved)
379 {
380 jfunc->type = IPA_JF_ANCESTOR;
381 jfunc->value.ancestor.formal_id = formal_id;
382 jfunc->value.ancestor.offset = offset;
383 jfunc->value.ancestor.type = type;
384 jfunc->value.ancestor.agg_preserved = agg_preserved;
385 }
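
/* To illustrate (as a sketch) what the setters above describe, in

     void bar (int);
     void foo (int a) { bar (a + 4); }

   the argument of the call to bar corresponds to an arithmetic pass-through
   jump function with formal_id 0, operation PLUS_EXPR and operand 4, while
   in plain "bar (a);" a simple pass-through with formal_id 0 suffices.  */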
386
387 /* Structure to be passed in between detect_type_change and
388 check_stmt_for_type_change. */
389
390 struct type_change_info
391 {
392 /* Offset into the object where there is the virtual method pointer we are
393 looking for. */
394 HOST_WIDE_INT offset;
395 /* The declaration or SSA_NAME pointer of the base that we are checking for
396 type change. */
397 tree object;
398 /* If we actually can tell the type that the object has changed to, it is
399 stored in this field. Otherwise it remains NULL_TREE. */
400 tree known_current_type;
401 /* Set to true if dynamic type change has been detected. */
402 bool type_maybe_changed;
403 /* Set to true if multiple types have been encountered. known_current_type
404 must be disregarded in that case. */
405 bool multiple_types_encountered;
406 };
407
408 /* Return true if STMT can modify a virtual method table pointer.
409
410 This function makes special assumptions about both constructors and
411 destructors which are all the functions that are allowed to alter the VMT
412 pointers. It assumes that destructors begin with assignment into all VMT
413 pointers and that constructors essentially look in the following way:
414
415 1) The very first thing they do is call the constructors of those ancestor
416 sub-objects that have them.
417
418 2) Then the VMT pointers of this object and all of its ancestors are set to
419 new values corresponding to the type of the constructor.
420
421 3) Only afterwards are other things run, such as the constructors of member
422 sub-objects and the code written by the user. Only this part may include
423 calling virtual functions, directly or indirectly.
424
425 There is no way to call a constructor of an ancestor sub-object in any
426 other way.
427
428 This means that we do not have to care whether constructors get the correct
429 type information because they will always change it (in fact, if we define
430 the type to be given by the VMT pointer, it is undefined).
431
432 The most important fact to derive from the above is that if, for some
433 statement in section 3, we try to detect whether the dynamic type has
434 changed, we can safely ignore all calls as we examine the function body
435 backwards until we reach statements in section 2 because these calls cannot
436 be ancestor constructors or destructors (if the input is not bogus) and so
437 do not change the dynamic type (this holds true only for automatically
438 allocated objects but at the moment we devirtualize only these). We then
439 must detect that statements in section 2 change the dynamic type and can try
440 to derive the new type. That is enough and we can stop, we will never see
441 the calls into constructors of sub-objects in this code. Therefore we can
442 safely ignore all call statements that we traverse.
443 */
444
445 static bool
446 stmt_may_be_vtbl_ptr_store (gimple stmt)
447 {
448 if (is_gimple_call (stmt))
449 return false;
450 else if (is_gimple_assign (stmt))
451 {
452 tree lhs = gimple_assign_lhs (stmt);
453
454 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
455 {
456 if (flag_strict_aliasing
457 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
458 return false;
459
460 if (TREE_CODE (lhs) == COMPONENT_REF
461 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
462 return false;
463 /* In the future we might want to use get_base_ref_and_offset to find
464 if there is a field corresponding to the offset and if so, proceed
465 almost like if it was a component ref. */
466 }
467 }
468 return true;
469 }
470
471 /* If STMT can be proved to be an assignment to the virtual method table
472 pointer of the object described by TCI and the type associated with the new
473 table can be identified, return that type. Otherwise return NULL_TREE. */
474
475 static tree
476 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
477 {
478 HOST_WIDE_INT offset, size, max_size;
479 tree lhs, rhs, base;
480
481 if (!gimple_assign_single_p (stmt))
482 return NULL_TREE;
483
484 lhs = gimple_assign_lhs (stmt);
485 rhs = gimple_assign_rhs1 (stmt);
486 if (TREE_CODE (lhs) != COMPONENT_REF
487 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
488 || TREE_CODE (rhs) != ADDR_EXPR)
489 return NULL_TREE;
490 rhs = get_base_address (TREE_OPERAND (rhs, 0));
491 if (!rhs
492 || TREE_CODE (rhs) != VAR_DECL
493 || !DECL_VIRTUAL_P (rhs))
494 return NULL_TREE;
495
496 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
497 if (offset != tci->offset
498 || size != POINTER_SIZE
499 || max_size != POINTER_SIZE)
500 return NULL_TREE;
501 if (TREE_CODE (base) == MEM_REF)
502 {
503 if (TREE_CODE (tci->object) != MEM_REF
504 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
505 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
506 TREE_OPERAND (base, 1)))
507 return NULL_TREE;
508 }
509 else if (tci->object != base)
510 return NULL_TREE;
511
512 return DECL_CONTEXT (rhs);
513 }
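
/* Schematically, the statement matched by the function above is a store of
   the address of a virtual table, a DECL_VIRTUAL_P VAR_DECL, into a
   DECL_VIRTUAL_P field, e.g.

     this_1->_vptr.A = &_ZTV1B;

   in which case DECL_CONTEXT of the virtual table _ZTV1B, i.e. the class B,
   is returned as the new type.  */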
514
515 /* Callback of walk_aliased_vdefs and a helper function for
516 detect_type_change to check whether a particular statement may modify
517 the virtual table pointer, and if possible also determine the new type of
518 the (sub-)object. It stores its result into DATA, which points to a
519 type_change_info structure. */
520
521 static bool
522 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
523 {
524 gimple stmt = SSA_NAME_DEF_STMT (vdef);
525 struct type_change_info *tci = (struct type_change_info *) data;
526
527 if (stmt_may_be_vtbl_ptr_store (stmt))
528 {
529 tree type;
530 type = extr_type_from_vtbl_ptr_store (stmt, tci);
531 if (tci->type_maybe_changed
532 && type != tci->known_current_type)
533 tci->multiple_types_encountered = true;
534 tci->known_current_type = type;
535 tci->type_maybe_changed = true;
536 return true;
537 }
538 else
539 return false;
540 }
541
542
543
544 /* Like detect_type_change but with an extra argument COMP_TYPE which will
545 become the component type part of the new JFUNC if a dynamic type change is
546 detected and the new base type is identified. */
547
548 static bool
549 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
550 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
551 {
552 struct type_change_info tci;
553 ao_ref ao;
554
555 gcc_checking_assert (DECL_P (arg)
556 || TREE_CODE (arg) == MEM_REF
557 || handled_component_p (arg));
558 /* Const calls cannot call virtual methods through the VMT and so type
559 changes do not matter. */
560 if (!flag_devirtualize || !gimple_vuse (call))
561 return false;
562
563 ao_ref_init (&ao, arg);
564 ao.base = base;
565 ao.offset = offset;
566 ao.size = POINTER_SIZE;
567 ao.max_size = ao.size;
568
569 tci.offset = offset;
570 tci.object = get_base_address (arg);
571 tci.known_current_type = NULL_TREE;
572 tci.type_maybe_changed = false;
573 tci.multiple_types_encountered = false;
574
575 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
576 &tci, NULL);
577 if (!tci.type_maybe_changed)
578 return false;
579
580 if (!tci.known_current_type
581 || tci.multiple_types_encountered
582 || offset != 0)
583 jfunc->type = IPA_JF_UNKNOWN;
584 else
585 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
586
587 return true;
588 }
589
590 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
591 looking for assignments to its virtual table pointer. If it has, return true
592 and fill in the jump function JFUNC with relevant type information or set it
593 to unknown. ARG is the object itself (not a pointer to it, unless
594 dereferenced). BASE is the base of the memory access as returned by
595 get_ref_base_and_extent, as is the offset. */
596
597 static bool
598 detect_type_change (tree arg, tree base, gimple call,
599 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
600 {
601 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
602 }
603
604 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
605 SSA name (its dereference will become the base and the offset is assumed to
606 be zero). */
607
608 static bool
609 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
610 {
611 tree comp_type;
612
613 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
614 if (!flag_devirtualize
615 || !POINTER_TYPE_P (TREE_TYPE (arg))
616 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
617 return false;
618
619 comp_type = TREE_TYPE (TREE_TYPE (arg));
620 arg = build2 (MEM_REF, ptr_type_node, arg,
621 build_int_cst (ptr_type_node, 0));
622
623 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
624 }
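
/* As an example (a sketch) of what the type change detection looks for,
   consider

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     void foo (A *a)
     {
       a->~A ();
       new (a) B;
       a->f ();
     }

   walking the virtual operands backwards from the call a->f () reaches the
   VMT store performed by the constructor of B, so the dynamic type of *a is
   known to have changed to B by the time of the call.  */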
625
626 /* Callback of walk_aliased_vdefs. Records the fact that it has been invoked
627 by setting the boolean variable pointed to by DATA to true. */
628
629 static bool
630 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
631 void *data)
632 {
633 bool *b = (bool *) data;
634 *b = true;
635 return true;
636 }
637
638 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
639 a value known not to be modified in this function before reaching the
640 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
641 information about the parameter. */
642
643 static bool
644 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
645 gimple stmt, tree parm_load)
646 {
647 bool modified = false;
648 bitmap *visited_stmts;
649 ao_ref refd;
650
651 if (parm_ainfo && parm_ainfo->parm_modified)
652 return false;
653
654 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
655 ao_ref_init (&refd, parm_load);
656 /* We can cache visited statements only when parm_ainfo is available and when
657 we are looking at a naked load of the whole parameter. */
658 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
659 visited_stmts = NULL;
660 else
661 visited_stmts = &parm_ainfo->parm_visited_statements;
662 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
663 visited_stmts);
664 if (parm_ainfo && modified)
665 parm_ainfo->parm_modified = true;
666 return !modified;
667 }
668
669 /* If STMT is an assignment that loads a value from a parameter declaration
670 that has not been modified, return the index of the parameter in
671 ipa_node_params. Otherwise return -1. */
672
673 static int
674 load_from_unmodified_param (VEC (ipa_param_descriptor_t, heap) *descriptors,
675 struct param_analysis_info *parms_ainfo,
676 gimple stmt)
677 {
678 int index;
679 tree op1;
680
681 if (!gimple_assign_single_p (stmt))
682 return -1;
683
684 op1 = gimple_assign_rhs1 (stmt);
685 if (TREE_CODE (op1) != PARM_DECL)
686 return -1;
687
688 index = ipa_get_param_decl_index_1 (descriptors, op1);
689 if (index < 0
690 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
691 : NULL, stmt, op1))
692 return -1;
693
694 return index;
695 }
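
/* For instance (a sketch), if a formal parameter "a" is addressable and
   therefore not a gimple register, the body may contain

     a.0_2 = a;
     bar (a.0_2);

   and the load a.0_2 = a is recognized here, provided the walk over virtual
   definitions finds no statement that may modify "a" before the load.  */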
696
697 /* Return true if memory reference REF loads data that are known to be
698 unmodified in this function before reaching statement STMT. PARM_AINFO, if
699 non-NULL, is a pointer to a structure containing temporary information about
700 PARM. */
701
702 static bool
703 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
704 gimple stmt, tree ref)
705 {
706 bool modified = false;
707 ao_ref refd;
708
709 gcc_checking_assert (gimple_vuse (stmt));
710 if (parm_ainfo && parm_ainfo->ref_modified)
711 return false;
712
713 ao_ref_init (&refd, ref);
714 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
715 NULL);
716 if (parm_ainfo && modified)
717 parm_ainfo->ref_modified = true;
718 return !modified;
719 }
720
721 /* Return true if the data pointed to by PARM is known to be unmodified in this
722 function before reaching call statement CALL into which it is passed.
723 PARM_AINFO is a pointer to a structure containing temporary information
724 about PARM. */
725
726 static bool
727 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
728 gimple call, tree parm)
729 {
730 bool modified = false;
731 ao_ref refd;
732
733 /* It's unnecessary to calculate anything about memory contents for a const
734 function because it is not going to use it. But do not cache the result
735 either. Also, no such calculations are done for non-pointers. */
736 if (!gimple_vuse (call)
737 || !POINTER_TYPE_P (TREE_TYPE (parm)))
738 return false;
739
740 if (parm_ainfo->pt_modified)
741 return false;
742
743 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
744 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
745 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
746 if (modified)
747 parm_ainfo->pt_modified = true;
748 return !modified;
749 }
750
751 /* Return true if we can prove that OP is a memory reference loading unmodified
752 data from an aggregate passed as a parameter and if the aggregate is passed
753 by reference, that the alias type of the load corresponds to the type of the
754 formal parameter (so that we can rely on this type for TBAA in callers).
755 DESCRIPTORS and PARMS_AINFO describe parameters of the current function (but
756 the latter can be NULL), STMT is the load statement. If the function returns
757 true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
758 the offset within the aggregate and whether it is a load from a value passed
759 by reference, respectively. */
760
761 static bool
762 ipa_load_from_parm_agg_1 (VEC (ipa_param_descriptor_t, heap) *descriptors,
763 struct param_analysis_info *parms_ainfo, gimple stmt,
764 tree op, int *index_p, HOST_WIDE_INT *offset_p,
765 bool *by_ref_p)
766 {
767 int index;
768 HOST_WIDE_INT size, max_size;
769 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
770
771 if (max_size == -1 || max_size != size || *offset_p < 0)
772 return false;
773
774 if (DECL_P (base))
775 {
776 int index = ipa_get_param_decl_index_1 (descriptors, base);
777 if (index >= 0
778 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
779 : NULL, stmt, op))
780 {
781 *index_p = index;
782 *by_ref_p = false;
783 return true;
784 }
785 return false;
786 }
787
788 if (TREE_CODE (base) != MEM_REF
789 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
790 || !integer_zerop (TREE_OPERAND (base, 1)))
791 return false;
792
793 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
794 {
795 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
796 index = ipa_get_param_decl_index_1 (descriptors, parm);
797 }
798 else
799 {
800 /* This branch catches situations where a pointer parameter is not a
801 gimple register, for example:
802
803 void hip7(S*) (struct S * p)
804 {
805 void (*<T2e4>) (struct S *) D.1867;
806 struct S * p.1;
807
808 <bb 2>:
809 p.1_1 = p;
810 D.1867_2 = p.1_1->f;
811 D.1867_2 ();
812 gdp = &p;
813 */
814
815 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
816 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
817 }
818
819 if (index >= 0
820 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
821 stmt, op))
822 {
823 *index_p = index;
824 *by_ref_p = true;
825 return true;
826 }
827 return false;
828 }
829
830 /* Just like the previous function, but without the param_analysis_info
831 pointer, for users outside of this file. */
832
833 bool
834 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
835 tree op, int *index_p, HOST_WIDE_INT *offset_p,
836 bool *by_ref_p)
837 {
838 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
839 offset_p, by_ref_p);
840 }
841
842 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
843 of an assignment statement STMT, try to determine whether we are actually
844 handling any of the following cases and construct an appropriate jump
845 function into JFUNC if so:
846
847 1) The passed value is loaded from a formal parameter which is not a gimple
848 register (most probably because it is addressable, the value has to be
849 scalar) and we can guarantee the value has not changed. This case can
850 therefore be described by a simple pass-through jump function. For example:
851
852 foo (int a)
853 {
854 int a.0;
855
856 a.0_2 = a;
857 bar (a.0_2);
858
859 2) The passed value can be described by a simple arithmetic pass-through
860 jump function. E.g.
861
862 foo (int a)
863 {
864 int D.2064;
865
866 D.2064_4 = a.1(D) + 4;
867 bar (D.2064_4);
868
869 This case can also occur in combination of the previous one, e.g.:
870
871 foo (int a, int z)
872 {
873 int a.0;
874 int D.2064;
875
876 a.0_3 = a;
877 D.2064_4 = a.0_3 + 4;
878 foo (D.2064_4);
879
880 3) The passed value is an address of an object within another one (which is
881 also passed by reference). Such situations are described by an ancestor
882 jump function and arise in code such as:
883
884 B::foo() (struct B * const this)
885 {
886 struct A * D.1845;
887
888 D.1845_2 = &this_1(D)->D.1748;
889 A::bar (D.1845_2);
890
891 INFO is the structure describing individual parameters that is used across
892 different stages of IPA optimizations. PARMS_AINFO contains the information
893 that is only needed for intraprocedural analysis. */
894
895 static void
896 compute_complex_assign_jump_func (struct ipa_node_params *info,
897 struct param_analysis_info *parms_ainfo,
898 struct ipa_jump_func *jfunc,
899 gimple call, gimple stmt, tree name)
900 {
901 HOST_WIDE_INT offset, size, max_size;
902 tree op1, tc_ssa, base, ssa;
903 int index;
904
905 op1 = gimple_assign_rhs1 (stmt);
906
907 if (TREE_CODE (op1) == SSA_NAME)
908 {
909 if (SSA_NAME_IS_DEFAULT_DEF (op1))
910 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
911 else
912 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
913 SSA_NAME_DEF_STMT (op1));
914 tc_ssa = op1;
915 }
916 else
917 {
918 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
919 tc_ssa = gimple_assign_lhs (stmt);
920 }
921
922 if (index >= 0)
923 {
924 tree op2 = gimple_assign_rhs2 (stmt);
925
926 if (op2)
927 {
928 if (!is_gimple_ip_invariant (op2)
929 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
930 && !useless_type_conversion_p (TREE_TYPE (name),
931 TREE_TYPE (op1))))
932 return;
933
934 ipa_set_jf_arith_pass_through (jfunc, index, op2,
935 gimple_assign_rhs_code (stmt));
936 }
937 else if (gimple_assign_single_p (stmt)
938 && !detect_type_change_ssa (tc_ssa, call, jfunc))
939 {
940 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
941 call, tc_ssa);
942 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
943 }
944 return;
945 }
946
947 if (TREE_CODE (op1) != ADDR_EXPR)
948 return;
949 op1 = TREE_OPERAND (op1, 0);
950 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
951 return;
952 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
953 if (TREE_CODE (base) != MEM_REF
954 /* If this is a varying address, punt. */
955 || max_size == -1
956 || max_size != size)
957 return;
958 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
959 ssa = TREE_OPERAND (base, 0);
960 if (TREE_CODE (ssa) != SSA_NAME
961 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
962 || offset < 0)
963 return;
964
965 /* Dynamic types are changed only in constructors and destructors. */
966 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
967 if (index >= 0
968 && !detect_type_change (op1, base, call, jfunc, offset))
969 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
970 parm_ref_data_pass_through_p (&parms_ainfo[index],
971 call, ssa));
972 }
973
974 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
975 it looks like:
976
977 iftmp.1_3 = &obj_2(D)->D.1762;
978
979 The base of the MEM_REF must be a default definition SSA NAME of a
980 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
981 whole MEM_REF expression is returned and the offset calculated from any
982 handled components and the MEM_REF itself is stored into *OFFSET. The whole
983 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
984
985 static tree
986 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
987 {
988 HOST_WIDE_INT size, max_size;
989 tree expr, parm, obj;
990
991 if (!gimple_assign_single_p (assign))
992 return NULL_TREE;
993 expr = gimple_assign_rhs1 (assign);
994
995 if (TREE_CODE (expr) != ADDR_EXPR)
996 return NULL_TREE;
997 expr = TREE_OPERAND (expr, 0);
998 obj = expr;
999 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1000
1001 if (TREE_CODE (expr) != MEM_REF
1002 /* If this is a varying address, punt. */
1003 || max_size == -1
1004 || max_size != size
1005 || *offset < 0)
1006 return NULL_TREE;
1007 parm = TREE_OPERAND (expr, 0);
1008 if (TREE_CODE (parm) != SSA_NAME
1009 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1010 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1011 return NULL_TREE;
1012
1013 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1014 *obj_p = obj;
1015 return expr;
1016 }
1017
1018
1019 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1020 statement PHI, try to find out whether it is in fact a
1021 multiple-inheritance typecast from a descendant into an ancestor of a formal
1022 parameter and thus can be described by an ancestor jump function and if so,
1023 write the appropriate function into JFUNC.
1024
1025 Essentially we want to match the following pattern:
1026
1027 if (obj_2(D) != 0B)
1028 goto <bb 3>;
1029 else
1030 goto <bb 4>;
1031
1032 <bb 3>:
1033 iftmp.1_3 = &obj_2(D)->D.1762;
1034
1035 <bb 4>:
1036 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1037 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1038 return D.1879_6; */
1039
1040 static void
1041 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1042 struct param_analysis_info *parms_ainfo,
1043 struct ipa_jump_func *jfunc,
1044 gimple call, gimple phi)
1045 {
1046 HOST_WIDE_INT offset;
1047 gimple assign, cond;
1048 basic_block phi_bb, assign_bb, cond_bb;
1049 tree tmp, parm, expr, obj;
1050 int index, i;
1051
1052 if (gimple_phi_num_args (phi) != 2)
1053 return;
1054
1055 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1056 tmp = PHI_ARG_DEF (phi, 0);
1057 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1058 tmp = PHI_ARG_DEF (phi, 1);
1059 else
1060 return;
1061 if (TREE_CODE (tmp) != SSA_NAME
1062 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1063 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1064 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1065 return;
1066
1067 assign = SSA_NAME_DEF_STMT (tmp);
1068 assign_bb = gimple_bb (assign);
1069 if (!single_pred_p (assign_bb))
1070 return;
1071 expr = get_ancestor_addr_info (assign, &obj, &offset);
1072 if (!expr)
1073 return;
1074 parm = TREE_OPERAND (expr, 0);
1075 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1076 gcc_assert (index >= 0);
1077
1078 cond_bb = single_pred (assign_bb);
1079 cond = last_stmt (cond_bb);
1080 if (!cond
1081 || gimple_code (cond) != GIMPLE_COND
1082 || gimple_cond_code (cond) != NE_EXPR
1083 || gimple_cond_lhs (cond) != parm
1084 || !integer_zerop (gimple_cond_rhs (cond)))
1085 return;
1086
1087 phi_bb = gimple_bb (phi);
1088 for (i = 0; i < 2; i++)
1089 {
1090 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1091 if (pred != assign_bb && pred != cond_bb)
1092 return;
1093 }
1094
1095 if (!detect_type_change (obj, expr, call, jfunc, offset))
1096 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1097 parm_ref_data_pass_through_p (&parms_ainfo[index],
1098 call, parm));
1099 }
1100
1101 /* Given OP which is passed as an actual argument to a called function,
1102 determine if it is possible to construct a KNOWN_TYPE jump function for it
1103 and if so, create one and store it to JFUNC. */
1104
1105 static void
1106 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1107 gimple call)
1108 {
1109 HOST_WIDE_INT offset, size, max_size;
1110 tree base;
1111
1112 if (!flag_devirtualize
1113 || TREE_CODE (op) != ADDR_EXPR
1114 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1115 return;
1116
1117 op = TREE_OPERAND (op, 0);
1118 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1119 if (!DECL_P (base)
1120 || max_size == -1
1121 || max_size != size
1122 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1123 || is_global_var (base))
1124 return;
1125
1126 if (!TYPE_BINFO (TREE_TYPE (base))
1127 || detect_type_change (op, base, call, jfunc, offset))
1128 return;
1129
1130 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1131 }
1132
1133 /* Inspect the given TYPE and return true iff it has the same structure (the
1134 same number of fields of the same types) as a C++ member pointer. If
1135 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1136 corresponding fields there. */
1137
1138 static bool
1139 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1140 {
1141 tree fld;
1142
1143 if (TREE_CODE (type) != RECORD_TYPE)
1144 return false;
1145
1146 fld = TYPE_FIELDS (type);
1147 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1148 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1149 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1150 return false;
1151
1152 if (method_ptr)
1153 *method_ptr = fld;
1154
1155 fld = DECL_CHAIN (fld);
1156 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1157 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1158 return false;
1159 if (delta)
1160 *delta = fld;
1161
1162 if (DECL_CHAIN (fld))
1163 return false;
1164
1165 return true;
1166 }
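
/* The layout tested by the function above corresponds to the common
   representation of C++ pointers to member functions, roughly

     struct
     {
       void (T::*__pfn) ();   -- method pointer or vtable index
       ptrdiff_t __delta;     -- this-pointer adjustment
     };

   with the exact field types depending on the target ABI.  */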
1167
1168 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1169 return the rhs of its defining statement. Otherwise return RHS as it
1170 is. */
1171
1172 static inline tree
1173 get_ssa_def_if_simple_copy (tree rhs)
1174 {
1175 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1176 {
1177 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1178
1179 if (gimple_assign_single_p (def_stmt))
1180 rhs = gimple_assign_rhs1 (def_stmt);
1181 else
1182 break;
1183 }
1184 return rhs;
1185 }
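
/* E.g. for a chain of simple copies

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) returns a_1(D), stopping at the default
   definition (or at the first statement that is not a plain copy).  */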
1186
1187 /* Simple linked list, describing known contents of an aggregate before
1188 a call. */
1189
1190 struct ipa_known_agg_contents_list
1191 {
1192 /* Offset and size of the described part of the aggregate. */
1193 HOST_WIDE_INT offset, size;
1194 /* Known constant value or NULL if the contents are known to be unknown. */
1195 tree constant;
1196 /* Pointer to the next structure in the list. */
1197 struct ipa_known_agg_contents_list *next;
1198 };
1199
1200 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1201 in ARG is filled in with constant values. ARG can either be an aggregate
1202 expression or a pointer to an aggregate. JFUNC is the jump function into
1203 which the constants are subsequently stored. */
1204
1205 static void
1206 determine_known_aggregate_parts (gimple call, tree arg,
1207 struct ipa_jump_func *jfunc)
1208 {
1209 struct ipa_known_agg_contents_list *list = NULL;
1210 int item_count = 0, const_count = 0;
1211 HOST_WIDE_INT arg_offset, arg_size;
1212 gimple_stmt_iterator gsi;
1213 tree arg_base;
1214 bool check_ref, by_ref;
1215 ao_ref r;
1216
1217 /* The function operates in three stages. First, we prepare check_ref, r,
1218 arg_base and arg_offset based on what is actually passed as an actual
1219 argument. */
1220
1221 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1222 {
1223 by_ref = true;
1224 if (TREE_CODE (arg) == SSA_NAME)
1225 {
1226 tree type_size;
1227 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1228 return;
1229 check_ref = true;
1230 arg_base = arg;
1231 arg_offset = 0;
1232 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1233 arg_size = tree_low_cst (type_size, 1);
1234 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1235 }
1236 else if (TREE_CODE (arg) == ADDR_EXPR)
1237 {
1238 HOST_WIDE_INT arg_max_size;
1239
1240 arg = TREE_OPERAND (arg, 0);
1241 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1242 &arg_max_size);
1243 if (arg_max_size == -1
1244 || arg_max_size != arg_size
1245 || arg_offset < 0)
1246 return;
1247 if (DECL_P (arg_base))
1248 {
1249 tree size;
1250 check_ref = false;
1251 size = build_int_cst (integer_type_node, arg_size);
1252 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1253 }
1254 else
1255 return;
1256 }
1257 else
1258 return;
1259 }
1260 else
1261 {
1262 HOST_WIDE_INT arg_max_size;
1263
1264 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1265
1266 by_ref = false;
1267 check_ref = false;
1268 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1269 &arg_max_size);
1270 if (arg_max_size == -1
1271 || arg_max_size != arg_size
1272 || arg_offset < 0)
1273 return;
1274
1275 ao_ref_init (&r, arg);
1276 }
1277
1278 /* The second stage walks back the BB, looks at individual statements and as
1279 long as it is confident of how the statements affect contents of the
1280 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1281 structures describing it. */
1282 gsi = gsi_for_stmt (call);
1283 gsi_prev (&gsi);
1284 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1285 {
1286 struct ipa_known_agg_contents_list *n, **p;
1287 gimple stmt = gsi_stmt (gsi);
1288 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1289 tree lhs, rhs, lhs_base;
1290 bool partial_overlap;
1291
1292 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1293 continue;
1294 if (!gimple_assign_single_p (stmt))
1295 break;
1296
1297 lhs = gimple_assign_lhs (stmt);
1298 rhs = gimple_assign_rhs1 (stmt);
1299 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
1300 break;
1301
1302 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1303 &lhs_max_size);
1304 if (lhs_max_size == -1
1305 || lhs_max_size != lhs_size
1306 || (lhs_offset < arg_offset
1307 && lhs_offset + lhs_size > arg_offset)
1308 || (lhs_offset < arg_offset + arg_size
1309 && lhs_offset + lhs_size > arg_offset + arg_size))
1310 break;
1311
1312 if (check_ref)
1313 {
1314 if (TREE_CODE (lhs_base) != MEM_REF
1315 || TREE_OPERAND (lhs_base, 0) != arg_base
1316 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1317 break;
1318 }
1319 else if (lhs_base != arg_base)
1320 break;
1321
1322 if (lhs_offset + lhs_size < arg_offset
1323 || lhs_offset >= (arg_offset + arg_size))
1324 continue;
1325
1326 partial_overlap = false;
1327 p = &list;
1328 while (*p && (*p)->offset < lhs_offset)
1329 {
1330 if ((*p)->offset + (*p)->size > lhs_offset)
1331 {
1332 partial_overlap = true;
1333 break;
1334 }
1335 p = &(*p)->next;
1336 }
1337 if (partial_overlap)
1338 break;
1339 if (*p && (*p)->offset < lhs_offset + lhs_size)
1340 {
1341 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1342 /* We already know this value is subsequently overwritten with
1343 something else. */
1344 continue;
1345 else
1346 /* Otherwise this is a partial overlap which we cannot
1347 represent. */
1348 break;
1349 }
1350
1351 rhs = get_ssa_def_if_simple_copy (rhs);
1352 n = XALLOCA (struct ipa_known_agg_contents_list);
1353 n->size = lhs_size;
1354 n->offset = lhs_offset;
1355 if (is_gimple_ip_invariant (rhs))
1356 {
1357 n->constant = rhs;
1358 const_count++;
1359 }
1360 else
1361 n->constant = NULL_TREE;
1362 n->next = *p;
1363 *p = n;
1364
1365 item_count++;
1366 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1367 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1368 break;
1369 }
1370
1371 /* The third stage just goes over the list and creates an appropriate vector
1372 of ipa_agg_jf_item structures out of it, of course only if there are
1373 any known constants to begin with. */
1374
1375 if (const_count)
1376 {
1377 jfunc->agg.by_ref = by_ref;
1378 jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
1379 while (list)
1380 {
1381 if (list->constant)
1382 {
1383 struct ipa_agg_jf_item item;
1384 item.offset = list->offset - arg_offset;
1385 item.value = prune_expression_for_jf (list->constant);
1386 VEC_quick_push (ipa_agg_jf_item_t, jfunc->agg.items, item);
1387 }
1388 list = list->next;
1389 }
1390 }
1391 }
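
/* To illustrate the three stages on a simple example (a sketch), for a call
   site such as

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk from the call collects the two constant stores and the
   jump function of the argument ends up with two ipa_agg_jf_item entries,
   one per field, with offsets relative to the start of s and by_ref set.  */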
1392
1393 /* Compute jump function for all arguments of callsite CS and insert the
1394 information in the jump_functions array in the ipa_edge_args corresponding
1395 to this callsite. */
1396
1397 static void
1398 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1399 struct cgraph_edge *cs)
1400 {
1401 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1402 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1403 gimple call = cs->call_stmt;
1404 int n, arg_num = gimple_call_num_args (call);
1405
1406 if (arg_num == 0 || args->jump_functions)
1407 return;
1408 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);
1409
1410 for (n = 0; n < arg_num; n++)
1411 {
1412 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1413 tree arg = gimple_call_arg (call, n);
1414
1415 if (is_gimple_ip_invariant (arg))
1416 ipa_set_jf_constant (jfunc, arg);
1417 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1418 && TREE_CODE (arg) == PARM_DECL)
1419 {
1420 int index = ipa_get_param_decl_index (info, arg);
1421
1422 gcc_assert (index >= 0);
1423 /* Aggregate passed by value, check for pass-through, otherwise we
1424 will attempt to fill in aggregate contents later in this
1425 for cycle. */
1426 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1427 {
1428 ipa_set_jf_simple_pass_through (jfunc, index, false);
1429 continue;
1430 }
1431 }
1432 else if (TREE_CODE (arg) == SSA_NAME)
1433 {
1434 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1435 {
1436 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1437 if (index >= 0
1438 && !detect_type_change_ssa (arg, call, jfunc))
1439 {
1440 bool agg_p;
1441 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1442 call, arg);
1443 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1444 }
1445 }
1446 else
1447 {
1448 gimple stmt = SSA_NAME_DEF_STMT (arg);
1449 if (is_gimple_assign (stmt))
1450 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1451 call, stmt, arg);
1452 else if (gimple_code (stmt) == GIMPLE_PHI)
1453 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1454 call, stmt);
1455 }
1456 }
1457 else
1458 compute_known_type_jump_func (arg, jfunc, call);
1459
1460 if ((jfunc->type != IPA_JF_PASS_THROUGH
1461 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1462 && (jfunc->type != IPA_JF_ANCESTOR
1463 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1464 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1465 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1466 determine_known_aggregate_parts (call, arg, jfunc);
1467 }
1468 }
1469
1470 /* Compute jump functions for all edges - both direct and indirect - outgoing
1471 from NODE. Also count the actual arguments in the process. */
1472
1473 static void
1474 ipa_compute_jump_functions (struct cgraph_node *node,
1475 struct param_analysis_info *parms_ainfo)
1476 {
1477 struct cgraph_edge *cs;
1478
1479 for (cs = node->callees; cs; cs = cs->next_callee)
1480 {
1481 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1482 NULL);
1483 /* We do not need to bother analyzing calls to unknown
1484 functions unless they may become known during lto/whopr. */
1485 if (!callee->analyzed && !flag_lto)
1486 continue;
1487 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1488 }
1489
1490 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1491 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1492 }
1493
1494 /* If STMT looks like a statement loading a value from a member pointer formal
1495 parameter, return that parameter and store the offset of the field to
1496 *OFFSET_P, if it is non-NULL. Otherwise return NULL_TREE (but *OFFSET_P still
1497 might be clobbered). If USE_DELTA, then we look for a use of the delta
1498 field rather than the pfn. */
1499
1500 static tree
1501 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1502 HOST_WIDE_INT *offset_p)
1503 {
1504 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1505
1506 if (!gimple_assign_single_p (stmt))
1507 return NULL_TREE;
1508
1509 rhs = gimple_assign_rhs1 (stmt);
1510 if (TREE_CODE (rhs) == COMPONENT_REF)
1511 {
1512 ref_field = TREE_OPERAND (rhs, 1);
1513 rhs = TREE_OPERAND (rhs, 0);
1514 }
1515 else
1516 ref_field = NULL_TREE;
1517 if (TREE_CODE (rhs) != MEM_REF)
1518 return NULL_TREE;
1519 rec = TREE_OPERAND (rhs, 0);
1520 if (TREE_CODE (rec) != ADDR_EXPR)
1521 return NULL_TREE;
1522 rec = TREE_OPERAND (rec, 0);
1523 if (TREE_CODE (rec) != PARM_DECL
1524 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1525 return NULL_TREE;
1526 ref_offset = TREE_OPERAND (rhs, 1);
1527
1528 if (use_delta)
1529 fld = delta_field;
1530 else
1531 fld = ptr_field;
1532 if (offset_p)
1533 *offset_p = int_bit_position (fld);
1534
1535 if (ref_field)
1536 {
1537 if (integer_nonzerop (ref_offset))
1538 return NULL_TREE;
1539 return ref_field == fld ? rec : NULL_TREE;
1540 }
1541 else
1542 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1543 : NULL_TREE;
1544 }
1545
1546 /* Returns true iff T is an SSA_NAME defined by a statement. */
1547
1548 static bool
1549 ipa_is_ssa_with_stmt_def (tree t)
1550 {
1551 if (TREE_CODE (t) == SSA_NAME
1552 && !SSA_NAME_IS_DEFAULT_DEF (t))
1553 return true;
1554 else
1555 return false;
1556 }
1557
1558 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1559 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1560 indirect call graph edge. */
1561
1562 static struct cgraph_edge *
1563 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1564 {
1565 struct cgraph_edge *cs;
1566
1567 cs = cgraph_edge (node, stmt);
1568 cs->indirect_info->param_index = param_index;
1569 cs->indirect_info->offset = 0;
1570 cs->indirect_info->polymorphic = 0;
1571 cs->indirect_info->agg_contents = 0;
1572 return cs;
1573 }
1574
1575 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1576 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1577 intermediate information about each formal parameter. Currently it checks
1578 whether the call calls a pointer that is a formal parameter and if so, the
1579 parameter is marked with the called flag and an indirect call graph edge
1580 describing the call is created. This is very simple for ordinary pointers
1581 represented in SSA but not-so-nice when it comes to member pointers. The
1582 ugly part of this function does nothing more than trying to match the
1583 pattern of such a call. An example of such a pattern is the gimple dump
1584 below, the call is on the last line:
1585
1586 <bb 2>:
1587 f$__delta_5 = f.__delta;
1588 f$__pfn_24 = f.__pfn;
1589
1590 or
1591 <bb 2>:
1592 f$__delta_5 = MEM[(struct *)&f];
1593 f$__pfn_24 = MEM[(struct *)&f + 4B];
1594
1595 and a few lines below:
1596
1597 <bb 5>
1598 D.2496_3 = (int) f$__pfn_24;
1599 D.2497_4 = D.2496_3 & 1;
1600 if (D.2497_4 != 0)
1601 goto <bb 3>;
1602 else
1603 goto <bb 4>;
1604
1605 <bb 6>:
1606 D.2500_7 = (unsigned int) f$__delta_5;
1607 D.2501_8 = &S + D.2500_7;
1608 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1609 D.2503_10 = *D.2502_9;
1610 D.2504_12 = f$__pfn_24 + -1;
1611 D.2505_13 = (unsigned int) D.2504_12;
1612 D.2506_14 = D.2503_10 + D.2505_13;
1613 D.2507_15 = *D.2506_14;
1614 iftmp.11_16 = (String:: *) D.2507_15;
1615
1616 <bb 7>:
1617 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1618 D.2500_19 = (unsigned int) f$__delta_5;
1619 D.2508_20 = &S + D.2500_19;
1620 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1621
1622 Such patterns are results of simple calls to a member pointer:
1623
1624 int doprinting (int (MyString::* f)(int) const)
1625 {
1626 MyString S ("somestring");
1627
1628 return (S.*f)(4);
1629 }
1630
1631 Moreover, the function also looks for called pointers loaded from aggregates
1632 passed by value or reference. */
1633
1634 static void
1635 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1636 struct ipa_node_params *info,
1637 struct param_analysis_info *parms_ainfo,
1638 gimple call, tree target)
1639 {
1640 gimple def;
1641 tree n1, n2;
1642 gimple d1, d2;
1643 tree rec, rec2, cond;
1644 gimple branch;
1645 int index;
1646 basic_block bb, virt_bb, join;
1647 HOST_WIDE_INT offset;
1648 bool by_ref;
1649
1650 if (SSA_NAME_IS_DEFAULT_DEF (target))
1651 {
1652 tree var = SSA_NAME_VAR (target);
1653 index = ipa_get_param_decl_index (info, var);
1654 if (index >= 0)
1655 ipa_note_param_call (node, index, call);
1656 return;
1657 }
1658
1659 def = SSA_NAME_DEF_STMT (target);
1660 if (gimple_assign_single_p (def)
1661 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1662 gimple_assign_rhs1 (def), &index, &offset,
1663 &by_ref))
1664 {
1665 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1666 cs->indirect_info->offset = offset;
1667 cs->indirect_info->agg_contents = 1;
1668 cs->indirect_info->by_ref = by_ref;
1669 return;
1670 }
1671
1672 /* Now we need to try to match the complex pattern of calling a member
1673 pointer. */
1674 if (gimple_code (def) != GIMPLE_PHI
1675 || gimple_phi_num_args (def) != 2
1676 || !POINTER_TYPE_P (TREE_TYPE (target))
1677 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1678 return;
1679
1680 /* First, we need to check whether one of these is a load from a member
1681 pointer that is a parameter to this function. */
1682 n1 = PHI_ARG_DEF (def, 0);
1683 n2 = PHI_ARG_DEF (def, 1);
1684 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1685 return;
1686 d1 = SSA_NAME_DEF_STMT (n1);
1687 d2 = SSA_NAME_DEF_STMT (n2);
1688
1689 join = gimple_bb (def);
1690 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1691 {
1692 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1693 return;
1694
1695 bb = EDGE_PRED (join, 0)->src;
1696 virt_bb = gimple_bb (d2);
1697 }
1698 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1699 {
1700 bb = EDGE_PRED (join, 1)->src;
1701 virt_bb = gimple_bb (d1);
1702 }
1703 else
1704 return;
1705
1706 /* Second, we need to check that the basic blocks are laid out in the way
1707 corresponding to the pattern. */
1708
1709 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1710 || single_pred (virt_bb) != bb
1711 || single_succ (virt_bb) != join)
1712 return;
1713
1714 /* Third, let's see that the branching is done depending on the least
1715 significant bit of the pfn. */
1716
1717 branch = last_stmt (bb);
1718 if (!branch || gimple_code (branch) != GIMPLE_COND)
1719 return;
1720
1721 if ((gimple_cond_code (branch) != NE_EXPR
1722 && gimple_cond_code (branch) != EQ_EXPR)
1723 || !integer_zerop (gimple_cond_rhs (branch)))
1724 return;
1725
1726 cond = gimple_cond_lhs (branch);
1727 if (!ipa_is_ssa_with_stmt_def (cond))
1728 return;
1729
1730 def = SSA_NAME_DEF_STMT (cond);
1731 if (!is_gimple_assign (def)
1732 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1733 || !integer_onep (gimple_assign_rhs2 (def)))
1734 return;
1735
1736 cond = gimple_assign_rhs1 (def);
1737 if (!ipa_is_ssa_with_stmt_def (cond))
1738 return;
1739
1740 def = SSA_NAME_DEF_STMT (cond);
1741
1742 if (is_gimple_assign (def)
1743 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1744 {
1745 cond = gimple_assign_rhs1 (def);
1746 if (!ipa_is_ssa_with_stmt_def (cond))
1747 return;
1748 def = SSA_NAME_DEF_STMT (cond);
1749 }
1750
1751 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1752 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1753 == ptrmemfunc_vbit_in_delta),
1754 NULL);
1755 if (rec != rec2)
1756 return;
1757
1758 index = ipa_get_param_decl_index (info, rec);
1759 if (index >= 0
1760 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1761 {
1762 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1763 cs->indirect_info->offset = offset;
1764 cs->indirect_info->agg_contents = 1;
1765 }
1766
1767 return;
1768 }
1769
1770 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1771 object referenced in the expression is a formal parameter of the caller
1772 (described by INFO), create a call note for the statement. */
1773
1774 static void
1775 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1776 struct ipa_node_params *info, gimple call,
1777 tree target)
1778 {
1779 struct cgraph_edge *cs;
1780 struct cgraph_indirect_call_info *ii;
1781 struct ipa_jump_func jfunc;
1782 tree obj = OBJ_TYPE_REF_OBJECT (target);
1783 int index;
1784 HOST_WIDE_INT anc_offset;
1785
1786 if (!flag_devirtualize)
1787 return;
1788
1789 if (TREE_CODE (obj) != SSA_NAME)
1790 return;
1791
1792 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1793 {
1794 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1795 return;
1796
1797 anc_offset = 0;
1798 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1799 gcc_assert (index >= 0);
1800 if (detect_type_change_ssa (obj, call, &jfunc))
1801 return;
1802 }
1803 else
1804 {
1805 gimple stmt = SSA_NAME_DEF_STMT (obj);
1806 tree expr;
1807
1808 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1809 if (!expr)
1810 return;
1811 index = ipa_get_param_decl_index (info,
1812 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1813 gcc_assert (index >= 0);
1814 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1815 return;
1816 }
1817
1818 cs = ipa_note_param_call (node, index, call);
1819 ii = cs->indirect_info;
1820 ii->offset = anc_offset;
1821 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1822 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1823 ii->polymorphic = 1;
1824 }
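
/* Example (C++, invented names): given

     struct A { virtual int foo (); };
     int call_foo (A *a) { return a->foo (); }

   the OBJ_TYPE_REF object is the default-definition SSA name of the
   parameter, so a polymorphic indirect-call note is created with that
   parameter's index, a zero ancestor offset and the OTR token of foo
   (presumably zero here, foo being A's first virtual method).  */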
1825
1826 /* Analyze a call statement CALL to see whether and how it utilizes formal
1827 parameters of the caller (described by INFO).  PARMS_AINFO is a pointer to a
1828 vector containing intermediate information about each formal parameter.  */
1829
1830 static void
1831 ipa_analyze_call_uses (struct cgraph_node *node,
1832 struct ipa_node_params *info,
1833 struct param_analysis_info *parms_ainfo, gimple call)
1834 {
1835 tree target = gimple_call_fn (call);
1836
1837 if (!target)
1838 return;
1839 if (TREE_CODE (target) == SSA_NAME)
1840 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1841 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1842 ipa_analyze_virtual_call_uses (node, info, call, target);
1843 }
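
/* For example, in

     void apply (void (*cb) (int), int i) { cb (i); }

   the call target is the SSA name of the parameter CB, which is handled
   by ipa_analyze_indirect_call_uses, whereas a C++ virtual call is
   expressed as an OBJ_TYPE_REF and goes through
   ipa_analyze_virtual_call_uses above.  */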
1844
1845
1846 /* Analyze the call statement STMT with respect to formal parameters (described
1847 in INFO) of the caller given by NODE.  Currently it only checks whether
1848 formal parameters are used as call targets.  PARMS_AINFO is a pointer to a
1849 vector containing intermediate information about each formal parameter.  */
1850
1851 static void
1852 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1853 struct param_analysis_info *parms_ainfo, gimple stmt)
1854 {
1855 if (is_gimple_call (stmt))
1856 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1857 }
1858
1859 /* Callback of walk_stmt_load_store_addr_ops for visit_load, visit_store
1860 and visit_addr.  If OP is a parameter declaration, mark it as used in the
1861 info structure passed in DATA.  */
1862
1863 static bool
1864 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1865 tree op, void *data)
1866 {
1867 struct ipa_node_params *info = (struct ipa_node_params *) data;
1868
1869 op = get_base_address (op);
1870 if (op
1871 && TREE_CODE (op) == PARM_DECL)
1872 {
1873 int index = ipa_get_param_decl_index (info, op);
1874 gcc_assert (index >= 0);
1875 ipa_set_param_used (info, index, true);
1876 }
1877
1878 return false;
1879 }
1880
1881 /* Scan the function body of NODE and inspect the uses of formal parameters.
1882 Store the findings in various fields of the associated ipa_node_params
1883 structure, such as parameter flags, notes, etc.  PARMS_AINFO is a pointer to a
1884 vector containing intermediate information about each formal parameter. */
1885
1886 static void
1887 ipa_analyze_params_uses (struct cgraph_node *node,
1888 struct param_analysis_info *parms_ainfo)
1889 {
1890 tree decl = node->symbol.decl;
1891 basic_block bb;
1892 struct function *func;
1893 gimple_stmt_iterator gsi;
1894 struct ipa_node_params *info = IPA_NODE_REF (node);
1895 int i;
1896
1897 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1898 return;
1899
1900 for (i = 0; i < ipa_get_param_count (info); i++)
1901 {
1902 tree parm = ipa_get_param (info, i);
1903 tree ddef;
1904 /* For SSA regs, see if the parameter is used.  For non-SSA parameters
1905 we compute the flag during modification analysis.  */
1906 if (is_gimple_reg (parm)
1907 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1908 parm)) != NULL_TREE
1909 && !has_zero_uses (ddef))
1910 ipa_set_param_used (info, i, true);
1911 }
1912
1913 func = DECL_STRUCT_FUNCTION (decl);
1914 FOR_EACH_BB_FN (bb, func)
1915 {
1916 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1917 {
1918 gimple stmt = gsi_stmt (gsi);
1919
1920 if (is_gimple_debug (stmt))
1921 continue;
1922
1923 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1924 walk_stmt_load_store_addr_ops (stmt, info,
1925 visit_ref_for_mod_analysis,
1926 visit_ref_for_mod_analysis,
1927 visit_ref_for_mod_analysis);
1928 }
1929 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1930 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1931 visit_ref_for_mod_analysis,
1932 visit_ref_for_mod_analysis,
1933 visit_ref_for_mod_analysis);
1934 }
1935
1936 info->uses_analysis_done = 1;
1937 }
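
/* A small example of the two paths above: in

     struct s { int x; };
     int g (struct s p, int unused) { return p.x; }

   P is not a gimple register, so it is marked as used only when the load
   of p.x is walked by visit_ref_for_mod_analysis, while UNUSED keeps its
   used flag clear (its default definition, if any, has no uses).  */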
1938
1939 /* Initialize the array describing properties of formal parameters
1940 of NODE, analyze their uses and compute jump functions associated
1941 with actual arguments of calls from within NODE. */
1942
1943 void
1944 ipa_analyze_node (struct cgraph_node *node)
1945 {
1946 struct ipa_node_params *info;
1947 struct param_analysis_info *parms_ainfo;
1948 int i, param_count;
1949
1950 ipa_check_create_node_params ();
1951 ipa_check_create_edge_args ();
1952 info = IPA_NODE_REF (node);
1953 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1954 ipa_initialize_node_params (node);
1955
1956 param_count = ipa_get_param_count (info);
1957 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1958 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1959
1960 ipa_analyze_params_uses (node, parms_ainfo);
1961 ipa_compute_jump_functions (node, parms_ainfo);
1962
1963 for (i = 0; i < param_count; i++)
1964 {
1965 if (parms_ainfo[i].parm_visited_statements)
1966 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1967 if (parms_ainfo[i].pt_visited_statements)
1968 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1969 }
1970
1971 pop_cfun ();
1972 }
1973
1974
1975 /* Update the jump function DST when the call graph edge corresponding to SRC
1976 is being inlined, knowing that DST is of type ancestor and SRC of known
1977 type. */
1978
1979 static void
1980 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1981 struct ipa_jump_func *dst)
1982 {
1983 HOST_WIDE_INT combined_offset;
1984 tree combined_type;
1985
1986 combined_offset = ipa_get_jf_known_type_offset (src)
1987 + ipa_get_jf_ancestor_offset (dst);
1988 combined_type = ipa_get_jf_ancestor_type (dst);
1989
1990 ipa_set_jf_known_type (dst, combined_offset,
1991 ipa_get_jf_known_type_base_type (src),
1992 combined_type);
1993 }
1994
1995 /* Update the jump functions associated with call graph edge E when the call
1996 graph edge CS is being inlined, assuming that E->caller is already (possibly
1997 indirectly) inlined into CS->callee and that E has not been inlined. */
1998
1999 static void
2000 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2001 struct cgraph_edge *e)
2002 {
2003 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2004 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2005 int count = ipa_get_cs_argument_count (args);
2006 int i;
2007
2008 for (i = 0; i < count; i++)
2009 {
2010 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2011
2012 if (dst->type == IPA_JF_ANCESTOR)
2013 {
2014 struct ipa_jump_func *src;
2015 int dst_fid = dst->value.ancestor.formal_id;
2016
2017 /* Variable number of arguments can cause havoc if we try to access
2018 one that does not exist in the inlined edge. So make sure we
2019 don't. */
2020 if (dst_fid >= ipa_get_cs_argument_count (top))
2021 {
2022 dst->type = IPA_JF_UNKNOWN;
2023 continue;
2024 }
2025
2026 src = ipa_get_ith_jump_func (top, dst_fid);
2027
2028 if (src->agg.items
2029 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2030 {
2031 struct ipa_agg_jf_item *item;
2032 int j;
2033
2034 /* Currently we do not produce clobber aggregate jump functions;
2035 replace this with merging once we do.  */
2036 gcc_assert (!dst->agg.items);
2037
2038 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
2039 dst->agg.by_ref = src->agg.by_ref;
2040 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
2041 item->offset -= dst->value.ancestor.offset;
2042 }
2043
2044 if (src->type == IPA_JF_KNOWN_TYPE)
2045 combine_known_type_and_ancestor_jfs (src, dst);
2046 else if (src->type == IPA_JF_PASS_THROUGH
2047 && src->value.pass_through.operation == NOP_EXPR)
2048 {
2049 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2050 dst->value.ancestor.agg_preserved &=
2051 src->value.pass_through.agg_preserved;
2052 }
2053 else if (src->type == IPA_JF_ANCESTOR)
2054 {
2055 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2056 dst->value.ancestor.offset += src->value.ancestor.offset;
2057 dst->value.ancestor.agg_preserved &=
2058 src->value.ancestor.agg_preserved;
2059 }
2060 else
2061 dst->type = IPA_JF_UNKNOWN;
2062 }
2063 else if (dst->type == IPA_JF_PASS_THROUGH)
2064 {
2065 struct ipa_jump_func *src;
2066 /* We must check the range because of calls with a variable number of
2067 arguments, and we cannot combine jump functions with operations.  */
2068 if (dst->value.pass_through.operation == NOP_EXPR
2069 && (dst->value.pass_through.formal_id
2070 < ipa_get_cs_argument_count (top)))
2071 {
2072 bool agg_p;
2073 int dst_fid = dst->value.pass_through.formal_id;
2074 src = ipa_get_ith_jump_func (top, dst_fid);
2075 agg_p = dst->value.pass_through.agg_preserved;
2076
2077 dst->type = src->type;
2078 dst->value = src->value;
2079
2080 if (src->agg.items
2081 && (agg_p || !src->agg.by_ref))
2082 {
2083 /* Currently we do not produce clobber aggregate jump
2084 functions; replace this with merging once we do.  */
2085 gcc_assert (!dst->agg.items);
2086
2087 dst->agg.by_ref = src->agg.by_ref;
2088 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
2089 src->agg.items);
2090 }
2091
2092 if (!agg_p)
2093 {
2094 if (dst->type == IPA_JF_PASS_THROUGH)
2095 dst->value.pass_through.agg_preserved = false;
2096 else if (dst->type == IPA_JF_ANCESTOR)
2097 dst->value.ancestor.agg_preserved = false;
2098 }
2099 }
2100 else
2101 dst->type = IPA_JF_UNKNOWN;
2102 }
2103 }
2104 }
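
/* A minimal example of the pass-through case above:

     static int leaf (int p) { return p; }
     static int mid (int q) { return leaf (q); }
     int top (int r) { return mid (r); }

   The edge mid->leaf carries a simple pass-through jump function for
   MID's parameter Q.  When the call from TOP to MID (CS) is inlined, that
   jump function must be re-expressed in terms of TOP's parameters, which
   is done by copying the jump function computed for the corresponding
   argument of CS.  */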
2105
2106 /* If TARGET is an addr_expr of a function declaration, make it the destination
2107 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2108
2109 struct cgraph_edge *
2110 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2111 {
2112 struct cgraph_node *callee;
2113 struct inline_edge_summary *es = inline_edge_summary (ie);
2114
2115 if (TREE_CODE (target) == ADDR_EXPR)
2116 target = TREE_OPERAND (target, 0);
2117 if (TREE_CODE (target) != FUNCTION_DECL)
2118 return NULL;
2119 callee = cgraph_get_node (target);
2120 if (!callee)
2121 return NULL;
2122 ipa_check_create_node_params ();
2123
2124 /* We cannot make edges to inline clones.  It is a bug if someone removed
2125 the cgraph node too early.  */
2126 gcc_assert (!callee->global.inlined_to);
2127
2128 cgraph_make_edge_direct (ie, callee);
2129 es = inline_edge_summary (ie);
2130 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2131 - eni_size_weights.call_cost);
2132 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2133 - eni_time_weights.call_cost);
2134 if (dump_file)
2135 {
2136 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2137 "(%s/%i -> %s/%i), for stmt ",
2138 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2139 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2140 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2141 if (ie->call_stmt)
2142 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2143 else
2144 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2145 }
2146 callee = cgraph_function_or_thunk_node (callee, NULL);
2147
2148 return ie;
2149 }
2150
2151 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2152 return NULL if there is none.  BY_REF specifies whether the value has to
2153 be passed by reference or by value. */
2154
2155 tree
2156 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2157 HOST_WIDE_INT offset, bool by_ref)
2158 {
2159 struct ipa_agg_jf_item *item;
2160 int i;
2161
2162 if (by_ref != agg->by_ref)
2163 return NULL;
2164
2165 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2166 {
2167 if (item->offset == offset)
2168 {
2169 /* Currently we do not have clobber values; return NULL for them once
2170 we do.  */
2171 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2172 return item->value;
2173 }
2174 else if (item->offset > offset)
2175 return NULL;
2176 }
2177 return NULL;
2178 }
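
/* For example (invented names), with

     struct ops { int (*hook) (int); };
     static int impl (int i) { return i; }
     int use (struct ops *o, int i) { return o->hook (i); }
     int caller (int i)
     {
       struct ops o;
       o.hook = impl;
       return use (&o, i);
     }

   the call to USE can carry an aggregate jump function with by_ref set
   and the ADDR_EXPR of IMPL recorded at the offset of the HOOK field.
   Looking up that offset here is what later allows the indirect call in
   USE to be turned into a direct call to IMPL.  */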
2179
2180 /* Try to find a destination for indirect edge IE that corresponds to a simple
2181 call or a call of a member function pointer and where the destination is a
2182 pointer formal parameter described by jump function JFUNC. If it can be
2183 determined, return the newly direct edge, otherwise return NULL. */
2184
2185 static struct cgraph_edge *
2186 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2187 struct ipa_jump_func *jfunc)
2188 {
2189 tree target;
2190
2191 if (ie->indirect_info->agg_contents)
2192 {
2193 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2194 ie->indirect_info->offset,
2195 ie->indirect_info->by_ref);
2196 if (!target)
2197 return NULL;
2198 }
2199 else
2200 {
2201 if (jfunc->type != IPA_JF_CONST)
2202 return NULL;
2203 target = ipa_get_jf_constant (jfunc);
2204 }
2205 return ipa_make_edge_direct_to_target (ie, target);
2206 }
2207
2208 /* Try to find a destination for indirect edge IE that corresponds to a
2209 virtual call based on a formal parameter which is described by jump
2210 function JFUNC and if it can be determined, make it direct and return the
2211 direct edge. Otherwise, return NULL. */
2212
2213 static struct cgraph_edge *
2214 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2215 struct ipa_jump_func *jfunc)
2216 {
2217 tree binfo, target;
2218
2219 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2220 return NULL;
2221
2222 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2223 gcc_checking_assert (binfo);
2224 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2225 + ie->indirect_info->offset,
2226 ie->indirect_info->otr_type);
2227 if (binfo)
2228 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2229 binfo);
2230 else
2231 return NULL;
2232
2233 if (target)
2234 return ipa_make_edge_direct_to_target (ie, target);
2235 else
2236 return NULL;
2237 }
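
/* For illustration (hypothetical C++):

     struct A { virtual int foo (); };
     struct B : A { int foo () { return 1; } };
     int call_foo (A *a) { return a->foo (); }
     int driver () { B b; return call_foo (&b); }

   When call_foo is inlined into driver, the known-type jump function for
   the argument records type B at offset zero, so the BINFO walk above can
   resolve the virtual call to B::foo and the edge can be made direct.  */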
2238
2239 /* Update the param called notes associated with NODE when CS is being inlined,
2240 assuming NODE is (potentially indirectly) inlined into CS->callee.
2241 Moreover, if the callee is discovered to be constant, create a new cgraph
2242 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2243 unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
2244
2245 static bool
2246 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2247 struct cgraph_node *node,
2248 VEC (cgraph_edge_p, heap) **new_edges)
2249 {
2250 struct ipa_edge_args *top;
2251 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2252 bool res = false;
2253
2254 ipa_check_create_edge_args ();
2255 top = IPA_EDGE_REF (cs);
2256
2257 for (ie = node->indirect_calls; ie; ie = next_ie)
2258 {
2259 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2260 struct ipa_jump_func *jfunc;
2261 int param_index;
2262
2263 next_ie = ie->next_callee;
2264
2265 if (ici->param_index == -1)
2266 continue;
2267
2268 /* We must check the range because of calls with a variable number of arguments.  */
2269 if (ici->param_index >= ipa_get_cs_argument_count (top))
2270 {
2271 ici->param_index = -1;
2272 continue;
2273 }
2274
2275 param_index = ici->param_index;
2276 jfunc = ipa_get_ith_jump_func (top, param_index);
2277 if (jfunc->type == IPA_JF_PASS_THROUGH
2278 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2279 {
2280 if (ici->agg_contents
2281 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2282 ici->param_index = -1;
2283 else
2284 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2285 }
2286 else if (jfunc->type == IPA_JF_ANCESTOR)
2287 {
2288 if (ici->agg_contents
2289 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2290 ici->param_index = -1;
2291 else
2292 {
2293 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2294 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2295 }
2296 }
2297 else
2298 /* Either we can find a destination for this edge now or never. */
2299 ici->param_index = -1;
2300
2301 if (!flag_indirect_inlining)
2302 continue;
2303
2304 if (ici->polymorphic)
2305 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2306 else
2307 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2308
2309 if (new_direct_edge)
2310 {
2311 new_direct_edge->indirect_inlining_edge = 1;
2312 if (new_direct_edge->call_stmt)
2313 new_direct_edge->call_stmt_cannot_inline_p
2314 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2315 new_direct_edge->callee->symbol.decl);
2316 if (new_edges)
2317 {
2318 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
2319 new_direct_edge);
2320 top = IPA_EDGE_REF (cs);
2321 res = true;
2322 }
2323 }
2324 }
2325
2326 return res;
2327 }
2328
2329 /* Recursively traverse subtree of NODE (including node) made of inlined
2330 cgraph_edges when CS has been inlined and invoke
2331 update_indirect_edges_after_inlining on all nodes and
2332 update_jump_functions_after_inlining on all non-inlined edges that lead out
2333 of this subtree. Newly discovered indirect edges will be added to
2334 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
2335 created.  */
2336
2337 static bool
2338 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2339 struct cgraph_node *node,
2340 VEC (cgraph_edge_p, heap) **new_edges)
2341 {
2342 struct cgraph_edge *e;
2343 bool res;
2344
2345 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2346
2347 for (e = node->callees; e; e = e->next_callee)
2348 if (!e->inline_failed)
2349 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2350 else
2351 update_jump_functions_after_inlining (cs, e);
2352 for (e = node->indirect_calls; e; e = e->next_callee)
2353 update_jump_functions_after_inlining (cs, e);
2354
2355 return res;
2356 }
2357
2358 /* Update jump functions and call note functions on inlining the call site CS.
2359 CS is expected to lead to a node already cloned by
2360 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2361 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
2362 created.  */
2363
2364 bool
2365 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2366 VEC (cgraph_edge_p, heap) **new_edges)
2367 {
2368 bool changed;
2369 /* Do nothing if the preparation phase has not been carried out yet
2370 (i.e. during early inlining). */
2371 if (!ipa_node_params_vector)
2372 return false;
2373 gcc_assert (ipa_edge_args_vector);
2374
2375 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2376
2377 /* We do not keep jump functions of inlined edges up to date. Better to free
2378 them so we do not access them accidentally. */
2379 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2380 return changed;
2381 }
2382
2383 /* Frees all dynamically allocated structures that the argument info points
2384 to. */
2385
2386 void
2387 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2388 {
2389 if (args->jump_functions)
2390 ggc_free (args->jump_functions);
2391
2392 memset (args, 0, sizeof (*args));
2393 }
2394
2395 /* Free all ipa_edge structures. */
2396
2397 void
2398 ipa_free_all_edge_args (void)
2399 {
2400 int i;
2401 struct ipa_edge_args *args;
2402
2403 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2404 ipa_free_edge_args_substructures (args);
2405
2406 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2407 ipa_edge_args_vector = NULL;
2408 }
2409
2410 /* Frees all dynamically allocated structures that the param info points
2411 to. */
2412
2413 void
2414 ipa_free_node_params_substructures (struct ipa_node_params *info)
2415 {
2416 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2417 free (info->lattices);
2418 /* Lattice values and their sources are deallocated with their allocation
2419 pool. */
2420 VEC_free (tree, heap, info->known_vals);
2421 memset (info, 0, sizeof (*info));
2422 }
2423
2424 /* Free all ipa_node_params structures. */
2425
2426 void
2427 ipa_free_all_node_params (void)
2428 {
2429 int i;
2430 struct ipa_node_params *info;
2431
2432 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2433 ipa_free_node_params_substructures (info);
2434
2435 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2436 ipa_node_params_vector = NULL;
2437 }
2438
2439 /* Hook that is called by cgraph.c when an edge is removed. */
2440
2441 static void
2442 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2443 {
2444 /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
2445 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2446 <= (unsigned)cs->uid)
2447 return;
2448 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2449 }
2450
2451 /* Hook that is called by cgraph.c when a node is removed. */
2452
2453 static void
2454 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2455 {
2456 /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
2457 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2458 <= (unsigned)node->uid)
2459 return;
2460 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2461 }
2462
2463 /* Hook that is called by cgraph.c when an edge is duplicated. */
2464
2465 static void
2466 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2467 __attribute__((unused)) void *data)
2468 {
2469 struct ipa_edge_args *old_args, *new_args;
2470 unsigned int i;
2471
2472 ipa_check_create_edge_args ();
2473
2474 old_args = IPA_EDGE_REF (src);
2475 new_args = IPA_EDGE_REF (dst);
2476
2477 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2478 old_args->jump_functions);
2479
2480 for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
2481 VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
2482 = VEC_copy (ipa_agg_jf_item_t, gc,
2483 VEC_index (ipa_jump_func_t,
2484 old_args->jump_functions, i).agg.items);
2485 }
2486
2487 /* Hook that is called by cgraph.c when a node is duplicated. */
2488
2489 static void
2490 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2491 ATTRIBUTE_UNUSED void *data)
2492 {
2493 struct ipa_node_params *old_info, *new_info;
2494
2495 ipa_check_create_node_params ();
2496 old_info = IPA_NODE_REF (src);
2497 new_info = IPA_NODE_REF (dst);
2498
2499 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2500 old_info->descriptors);
2501 new_info->lattices = NULL;
2502 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2503
2504 new_info->uses_analysis_done = old_info->uses_analysis_done;
2505 new_info->node_enqueued = old_info->node_enqueued;
2506 }
2507
2508
2509 /* Analyze a function newly added to the callgraph.  */
2510
2511 static void
2512 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2513 {
2514 ipa_analyze_node (node);
2515 }
2516
2517 /* Register our cgraph hooks if they are not already there. */
2518
2519 void
2520 ipa_register_cgraph_hooks (void)
2521 {
2522 if (!edge_removal_hook_holder)
2523 edge_removal_hook_holder =
2524 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2525 if (!node_removal_hook_holder)
2526 node_removal_hook_holder =
2527 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2528 if (!edge_duplication_hook_holder)
2529 edge_duplication_hook_holder =
2530 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2531 if (!node_duplication_hook_holder)
2532 node_duplication_hook_holder =
2533 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2534 function_insertion_hook_holder =
2535 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2536 }
2537
2538 /* Unregister our cgraph hooks.  */
2539
2540 static void
2541 ipa_unregister_cgraph_hooks (void)
2542 {
2543 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2544 edge_removal_hook_holder = NULL;
2545 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2546 node_removal_hook_holder = NULL;
2547 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2548 edge_duplication_hook_holder = NULL;
2549 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2550 node_duplication_hook_holder = NULL;
2551 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2552 function_insertion_hook_holder = NULL;
2553 }
2554
2555 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2556 longer needed after ipa-cp. */
2557
2558 void
2559 ipa_free_all_structures_after_ipa_cp (void)
2560 {
2561 if (!optimize)
2562 {
2563 ipa_free_all_edge_args ();
2564 ipa_free_all_node_params ();
2565 free_alloc_pool (ipcp_sources_pool);
2566 free_alloc_pool (ipcp_values_pool);
2567 ipa_unregister_cgraph_hooks ();
2568 }
2569 }
2570
2571 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2572 longer needed after indirect inlining. */
2573
2574 void
2575 ipa_free_all_structures_after_iinln (void)
2576 {
2577 ipa_free_all_edge_args ();
2578 ipa_free_all_node_params ();
2579 ipa_unregister_cgraph_hooks ();
2580 if (ipcp_sources_pool)
2581 free_alloc_pool (ipcp_sources_pool);
2582 if (ipcp_values_pool)
2583 free_alloc_pool (ipcp_values_pool);
2584 }
2585
2586 /* Print the parameter descriptors of function NODE, as stored in its
2587 associated ipa_node_params structure, to F.  */
2588
2589 void
2590 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2591 {
2592 int i, count;
2593 tree temp;
2594 struct ipa_node_params *info;
2595
2596 if (!node->analyzed)
2597 return;
2598 info = IPA_NODE_REF (node);
2599 fprintf (f, " function %s parameter descriptors:\n",
2600 cgraph_node_name (node));
2601 count = ipa_get_param_count (info);
2602 for (i = 0; i < count; i++)
2603 {
2604 temp = ipa_get_param (info, i);
2605 if (TREE_CODE (temp) == PARM_DECL)
2606 fprintf (f, " param %d : %s", i,
2607 (DECL_NAME (temp)
2608 ? (*lang_hooks.decl_printable_name) (temp, 2)
2609 : "(unnamed)"));
2610 if (ipa_is_param_used (info, i))
2611 fprintf (f, " used");
2612 fprintf (f, "\n");
2613 }
2614 }
2615
2616 /* Print the parameter descriptors of all functions in the
2617 callgraph to F.  */
2618
2619 void
2620 ipa_print_all_params (FILE * f)
2621 {
2622 struct cgraph_node *node;
2623
2624 fprintf (f, "\nFunction parameters:\n");
2625 FOR_EACH_FUNCTION (node)
2626 ipa_print_node_params (f, node);
2627 }
2628
2629 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2630
2631 VEC(tree, heap) *
2632 ipa_get_vector_of_formal_parms (tree fndecl)
2633 {
2634 VEC(tree, heap) *args;
2635 int count;
2636 tree parm;
2637
2638 count = count_formal_params (fndecl);
2639 args = VEC_alloc (tree, heap, count);
2640 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2641 VEC_quick_push (tree, args, parm);
2642
2643 return args;
2644 }
2645
2646 /* Return a heap allocated vector containing types of formal parameters of
2647 function type FNTYPE. */
2648
2649 static inline VEC(tree, heap) *
2650 get_vector_of_formal_parm_types (tree fntype)
2651 {
2652 VEC(tree, heap) *types;
2653 int count = 0;
2654 tree t;
2655
2656 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2657 count++;
2658
2659 types = VEC_alloc (tree, heap, count);
2660 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2661 VEC_quick_push (tree, types, TREE_VALUE (t));
2662
2663 return types;
2664 }
2665
2666 /* Modify the function declaration FNDECL and its type according to the plan in
2667 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2668 to reflect the actual parameters being modified which are determined by the
2669 base_index field. */
2670
2671 void
2672 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2673 const char *synth_parm_prefix)
2674 {
2675 VEC(tree, heap) *oparms, *otypes;
2676 tree orig_type, new_type = NULL;
2677 tree old_arg_types, t, new_arg_types = NULL;
2678 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2679 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2680 tree new_reversed = NULL;
2681 bool care_for_types, last_parm_void;
2682
2683 if (!synth_parm_prefix)
2684 synth_parm_prefix = "SYNTH";
2685
2686 oparms = ipa_get_vector_of_formal_parms (fndecl);
2687 orig_type = TREE_TYPE (fndecl);
2688 old_arg_types = TYPE_ARG_TYPES (orig_type);
2689
2690 /* The following test is an ugly hack; some functions simply don't have any
2691 arguments in their type.  This is probably a bug but well... */
2692 care_for_types = (old_arg_types != NULL_TREE);
2693 if (care_for_types)
2694 {
2695 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2696 == void_type_node);
2697 otypes = get_vector_of_formal_parm_types (orig_type);
2698 if (last_parm_void)
2699 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2700 else
2701 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2702 }
2703 else
2704 {
2705 last_parm_void = false;
2706 otypes = NULL;
2707 }
2708
2709 for (i = 0; i < len; i++)
2710 {
2711 struct ipa_parm_adjustment *adj;
2712 gcc_assert (link);
2713
2714 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2715 parm = VEC_index (tree, oparms, adj->base_index);
2716 adj->base = parm;
2717
2718 if (adj->copy_param)
2719 {
2720 if (care_for_types)
2721 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2722 adj->base_index),
2723 new_arg_types);
2724 *link = parm;
2725 link = &DECL_CHAIN (parm);
2726 }
2727 else if (!adj->remove_param)
2728 {
2729 tree new_parm;
2730 tree ptype;
2731
2732 if (adj->by_ref)
2733 ptype = build_pointer_type (adj->type);
2734 else
2735 ptype = adj->type;
2736
2737 if (care_for_types)
2738 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2739
2740 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2741 ptype);
2742 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2743
2744 DECL_ARTIFICIAL (new_parm) = 1;
2745 DECL_ARG_TYPE (new_parm) = ptype;
2746 DECL_CONTEXT (new_parm) = fndecl;
2747 TREE_USED (new_parm) = 1;
2748 DECL_IGNORED_P (new_parm) = 1;
2749 layout_decl (new_parm, 0);
2750
2751 adj->base = parm;
2752 adj->reduction = new_parm;
2753
2754 *link = new_parm;
2755
2756 link = &DECL_CHAIN (new_parm);
2757 }
2758 }
2759
2760 *link = NULL_TREE;
2761
2762 if (care_for_types)
2763 {
2764 new_reversed = nreverse (new_arg_types);
2765 if (last_parm_void)
2766 {
2767 if (new_reversed)
2768 TREE_CHAIN (new_arg_types) = void_list_node;
2769 else
2770 new_reversed = void_list_node;
2771 }
2772 }
2773
2774 /* We make a distinct type copy to preserve as much as possible from the
2775 original type (debug info, attribute lists etc.).
2776 The exception is METHOD_TYPEs, which must have a THIS argument; when we
2777 are asked to remove it, we need to build a new FUNCTION_TYPE
2778 instead.  */
2779 if (TREE_CODE (orig_type) != METHOD_TYPE
2780 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
2781 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
2782 {
2783 new_type = build_distinct_type_copy (orig_type);
2784 TYPE_ARG_TYPES (new_type) = new_reversed;
2785 }
2786 else
2787 {
2788 new_type
2789 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2790 new_reversed));
2791 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2792 DECL_VINDEX (fndecl) = NULL_TREE;
2793 }
2794
2795 /* When signature changes, we need to clear builtin info. */
2796 if (DECL_BUILT_IN (fndecl))
2797 {
2798 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2799 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2800 }
2801
2802 /* This is a new type, not a copy of an old type. Need to reassociate
2803 variants. We can handle everything except the main variant lazily. */
2804 t = TYPE_MAIN_VARIANT (orig_type);
2805 if (orig_type != t)
2806 {
2807 TYPE_MAIN_VARIANT (new_type) = t;
2808 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2809 TYPE_NEXT_VARIANT (t) = new_type;
2810 }
2811 else
2812 {
2813 TYPE_MAIN_VARIANT (new_type) = new_type;
2814 TYPE_NEXT_VARIANT (new_type) = NULL;
2815 }
2816
2817 TREE_TYPE (fndecl) = new_type;
2818 DECL_VIRTUAL_P (fndecl) = 0;
2819 if (otypes)
2820 VEC_free (tree, heap, otypes);
2821 VEC_free (tree, heap, oparms);
2822 }
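
/* As a simplified example (invented names), an adjustment vector that
   copies the first parameter and replaces a by-reference scalar second
   parameter with its value would rewrite

     int f (int a, int *p);

   into something like

     int f (int a, int SYNTH.0);

   where SYNTH.0 is the new artificial PARM_DECL named with
   SYNTH_PARM_PREFIX; call sites are rewritten correspondingly by
   ipa_modify_call_arguments below.  */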
2823
2824 /* Modify actual arguments of the function call STMT as indicated in
2825 ADJUSTMENTS.  If this is a directly recursive call, CS must be NULL.
2826 Otherwise it must contain the corresponding call graph edge.  */
2827
2828 void
2829 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2830 ipa_parm_adjustment_vec adjustments)
2831 {
2832 VEC(tree, heap) *vargs;
2833 VEC(tree, gc) **debug_args = NULL;
2834 gimple new_stmt;
2835 gimple_stmt_iterator gsi;
2836 tree callee_decl;
2837 int i, len;
2838
2839 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2840 vargs = VEC_alloc (tree, heap, len);
2841 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2842
2843 gsi = gsi_for_stmt (stmt);
2844 for (i = 0; i < len; i++)
2845 {
2846 struct ipa_parm_adjustment *adj;
2847
2848 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2849
2850 if (adj->copy_param)
2851 {
2852 tree arg = gimple_call_arg (stmt, adj->base_index);
2853
2854 VEC_quick_push (tree, vargs, arg);
2855 }
2856 else if (!adj->remove_param)
2857 {
2858 tree expr, base, off;
2859 location_t loc;
2860
2861 /* We create a new parameter out of the value of the old one; we can
2862 do the following kinds of transformations:
2863
2864 - A scalar passed by reference is converted to a scalar passed by
2865 value. (adj->by_ref is false and the type of the original
2866 actual argument is a pointer to a scalar).
2867
2868 - A part of an aggregate is passed instead of the whole aggregate.
2869 The part can be passed either by value or by reference, this is
2870 determined by value of adj->by_ref. Moreover, the code below
2871 handles both situations when the original aggregate is passed by
2872 value (its type is not a pointer) and when it is passed by
2873 reference (it is a pointer to an aggregate).
2874
2875 When the new argument is passed by reference (adj->by_ref is true)
2876 it must be a part of an aggregate and therefore we form it by
2877 simply taking the address of a reference inside the original
2878 aggregate. */
2879
2880 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2881 base = gimple_call_arg (stmt, adj->base_index);
2882 loc = EXPR_LOCATION (base);
2883
2884 if (TREE_CODE (base) != ADDR_EXPR
2885 && POINTER_TYPE_P (TREE_TYPE (base)))
2886 off = build_int_cst (adj->alias_ptr_type,
2887 adj->offset / BITS_PER_UNIT);
2888 else
2889 {
2890 HOST_WIDE_INT base_offset;
2891 tree prev_base;
2892
2893 if (TREE_CODE (base) == ADDR_EXPR)
2894 base = TREE_OPERAND (base, 0);
2895 prev_base = base;
2896 base = get_addr_base_and_unit_offset (base, &base_offset);
2897 /* Aggregate arguments can have non-invariant addresses. */
2898 if (!base)
2899 {
2900 base = build_fold_addr_expr (prev_base);
2901 off = build_int_cst (adj->alias_ptr_type,
2902 adj->offset / BITS_PER_UNIT);
2903 }
2904 else if (TREE_CODE (base) == MEM_REF)
2905 {
2906 off = build_int_cst (adj->alias_ptr_type,
2907 base_offset
2908 + adj->offset / BITS_PER_UNIT);
2909 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2910 off);
2911 base = TREE_OPERAND (base, 0);
2912 }
2913 else
2914 {
2915 off = build_int_cst (adj->alias_ptr_type,
2916 base_offset
2917 + adj->offset / BITS_PER_UNIT);
2918 base = build_fold_addr_expr (base);
2919 }
2920 }
2921
2922 if (!adj->by_ref)
2923 {
2924 tree type = adj->type;
2925 unsigned int align;
2926 unsigned HOST_WIDE_INT misalign;
2927
2928 get_pointer_alignment_1 (base, &align, &misalign);
2929 misalign += (tree_to_double_int (off)
2930 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2931 * BITS_PER_UNIT);
2932 misalign = misalign & (align - 1);
2933 if (misalign != 0)
2934 align = (misalign & -misalign);
2935 if (align < TYPE_ALIGN (type))
2936 type = build_aligned_type (type, align);
2937 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2938 }
2939 else
2940 {
2941 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2942 expr = build_fold_addr_expr (expr);
2943 }
2944
2945 expr = force_gimple_operand_gsi (&gsi, expr,
2946 adj->by_ref
2947 || is_gimple_reg_type (adj->type),
2948 NULL, true, GSI_SAME_STMT);
2949 VEC_quick_push (tree, vargs, expr);
2950 }
2951 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2952 {
2953 unsigned int ix;
2954 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2955 gimple def_temp;
2956
2957 arg = gimple_call_arg (stmt, adj->base_index);
2958 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2959 {
2960 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2961 continue;
2962 arg = fold_convert_loc (gimple_location (stmt),
2963 TREE_TYPE (origin), arg);
2964 }
2965 if (debug_args == NULL)
2966 debug_args = decl_debug_args_insert (callee_decl);
2967 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2968 if (ddecl == origin)
2969 {
2970 ddecl = VEC_index (tree, *debug_args, ix + 1);
2971 break;
2972 }
2973 if (ddecl == NULL)
2974 {
2975 ddecl = make_node (DEBUG_EXPR_DECL);
2976 DECL_ARTIFICIAL (ddecl) = 1;
2977 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2978 DECL_MODE (ddecl) = DECL_MODE (origin);
2979
2980 VEC_safe_push (tree, gc, *debug_args, origin);
2981 VEC_safe_push (tree, gc, *debug_args, ddecl);
2982 }
2983 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2984 stmt);
2985 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2986 }
2987 }
2988
2989 if (dump_file && (dump_flags & TDF_DETAILS))
2990 {
2991 fprintf (dump_file, "replacing stmt:");
2992 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2993 }
2994
2995 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2996 VEC_free (tree, heap, vargs);
2997 if (gimple_call_lhs (stmt))
2998 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2999
3000 gimple_set_block (new_stmt, gimple_block (stmt));
3001 if (gimple_has_location (stmt))
3002 gimple_set_location (new_stmt, gimple_location (stmt));
3003 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3004 gimple_call_copy_flags (new_stmt, stmt);
3005
3006 if (dump_file && (dump_flags & TDF_DETAILS))
3007 {
3008 fprintf (dump_file, "with stmt:");
3009 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3010 fprintf (dump_file, "\n");
3011 }
3012 gsi_replace (&gsi, new_stmt, true);
3013 if (cs)
3014 cgraph_set_call_stmt (cs, new_stmt);
3015 update_ssa (TODO_update_ssa);
3016 free_dominance_info (CDI_DOMINATORS);
3017 }
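
/* Continuing the example above: a call f (x, q), where the second
   adjustment passes the value of *Q instead of Q itself, would be
   rewritten roughly as

     tmp_1 = MEM[(int *) q_2(D) + 0B];
     f (x_3(D), tmp_1);

   with the MEM_REF operand forced into gimple form before the new call
   statement and, when -g is in effect, a debug bind emitted so that the
   removed parameter remains visible to the debugger.  */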
3018
3019 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3020
3021 static bool
3022 index_in_adjustments_multiple_times_p (int base_index,
3023 ipa_parm_adjustment_vec adjustments)
3024 {
3025 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3026 bool one = false;
3027
3028 for (i = 0; i < len; i++)
3029 {
3030 struct ipa_parm_adjustment *adj;
3031 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3032
3033 if (adj->base_index == base_index)
3034 {
3035 if (one)
3036 return true;
3037 else
3038 one = true;
3039 }
3040 }
3041 return false;
3042 }
3043
3044
3045 /* Return adjustments that should have the same effect on function parameters
3046 and call arguments as if they were first changed according to adjustments in
3047 INNER and then by adjustments in OUTER. */
3048
3049 ipa_parm_adjustment_vec
3050 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3051 ipa_parm_adjustment_vec outer)
3052 {
3053 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
3054 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
3055 int removals = 0;
3056 ipa_parm_adjustment_vec adjustments, tmp;
3057
3058 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
3059 for (i = 0; i < inlen; i++)
3060 {
3061 struct ipa_parm_adjustment *n;
3062 n = &VEC_index (ipa_parm_adjustment_t, inner, i);
3063
3064 if (n->remove_param)
3065 removals++;
3066 else
3067 VEC_quick_push (ipa_parm_adjustment_t, tmp, *n);
3068 }
3069
3070 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
3071 for (i = 0; i < outlen; i++)
3072 {
3073 struct ipa_parm_adjustment r;
3074 struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
3075 outer, i);
3076 struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
3077 out->base_index);
3078
3079 memset (&r, 0, sizeof (r));
3080 gcc_assert (!in->remove_param);
3081 if (out->remove_param)
3082 {
3083 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3084 {
3085 r.remove_param = true;
3086 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3087 }
3088 continue;
3089 }
3090
3091 r.base_index = in->base_index;
3092 r.type = out->type;
3093
3094 /* FIXME: Create nonlocal value too. */
3095
3096 if (in->copy_param && out->copy_param)
3097 r.copy_param = true;
3098 else if (in->copy_param)
3099 r.offset = out->offset;
3100 else if (out->copy_param)
3101 r.offset = in->offset;
3102 else
3103 r.offset = in->offset + out->offset;
3104 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3105 }
3106
3107 for (i = 0; i < inlen; i++)
3108 {
3109 struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
3110 inner, i);
3111
3112 if (n->remove_param)
3113 VEC_quick_push (ipa_parm_adjustment_t, adjustments, *n);
3114 }
3115
3116 VEC_free (ipa_parm_adjustment_t, heap, tmp);
3117 return adjustments;
3118 }
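
/* For example, assume three original parameters.  If INNER removes
   parameter 1 (keeping 0 and 2) and OUTER then removes its own
   parameter 0, the combination built above removes original parameters
   0 and 1 and copies parameter 2: OUTER's indices are looked up in TMP
   (INNER without its removals) to recover original indices, and the
   removals from INNER are re-appended at the end.  */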
3119
3120 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
3121 way, assuming they are meant to be applied to FNDECL.  */
3122
3123 void
3124 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3125 tree fndecl)
3126 {
3127 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3128 bool first = true;
3129 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
3130
3131 fprintf (file, "IPA param adjustments: ");
3132 for (i = 0; i < len; i++)
3133 {
3134 struct ipa_parm_adjustment *adj;
3135 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3136
3137 if (!first)
3138 fprintf (file, " ");
3139 else
3140 first = false;
3141
3142 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3143 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
3144 if (adj->base)
3145 {
3146 fprintf (file, ", base: ");
3147 print_generic_expr (file, adj->base, 0);
3148 }
3149 if (adj->reduction)
3150 {
3151 fprintf (file, ", reduction: ");
3152 print_generic_expr (file, adj->reduction, 0);
3153 }
3154 if (adj->new_ssa_base)
3155 {
3156 fprintf (file, ", new_ssa_base: ");
3157 print_generic_expr (file, adj->new_ssa_base, 0);
3158 }
3159
3160 if (adj->copy_param)
3161 fprintf (file, ", copy_param");
3162 else if (adj->remove_param)
3163 fprintf (file, ", remove_param");
3164 else
3165 fprintf (file, ", offset %li", (long) adj->offset);
3166 if (adj->by_ref)
3167 fprintf (file, ", by_ref");
3168 print_node_brief (file, ", type: ", adj->type, 0);
3169 fprintf (file, "\n");
3170 }
3171 VEC_free (tree, heap, parms);
3172 }
3173
3174 /* Stream out jump function JUMP_FUNC to OB. */
3175
3176 static void
3177 ipa_write_jump_function (struct output_block *ob,
3178 struct ipa_jump_func *jump_func)
3179 {
3180 struct ipa_agg_jf_item *item;
3181 struct bitpack_d bp;
3182 int i, count;
3183
3184 streamer_write_uhwi (ob, jump_func->type);
3185 switch (jump_func->type)
3186 {
3187 case IPA_JF_UNKNOWN:
3188 break;
3189 case IPA_JF_KNOWN_TYPE:
3190 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3191 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3192 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3193 break;
3194 case IPA_JF_CONST:
3195 gcc_assert (
3196 EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
3197 stream_write_tree (ob, jump_func->value.constant, true);
3198 break;
3199 case IPA_JF_PASS_THROUGH:
3200 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3201 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3202 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3203 bp = bitpack_create (ob->main_stream);
3204 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3205 streamer_write_bitpack (&bp);
3206 break;
3207 case IPA_JF_ANCESTOR:
3208 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3209 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3210 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3211 bp = bitpack_create (ob->main_stream);
3212 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3213 streamer_write_bitpack (&bp);
3214 break;
3215 }
3216
3217 count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
3218 streamer_write_uhwi (ob, count);
3219 if (count)
3220 {
3221 bp = bitpack_create (ob->main_stream);
3222 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3223 streamer_write_bitpack (&bp);
3224 }
3225
3226 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
3227 {
3228 streamer_write_uhwi (ob, item->offset);
3229 stream_write_tree (ob, item->value, true);
3230 }
3231 }
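
/* The streamed record therefore consists of: the jump function type, a
   type-specific payload (trees streamed by reference, flag bits in
   bitpacks), the count of aggregate items, the by_ref bit if the count is
   nonzero, and one offset/value pair per item.  ipa_read_jump_function
   below consumes the fields in exactly this order.  */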
3232
3233 /* Read in jump function JUMP_FUNC from IB. */
3234
3235 static void
3236 ipa_read_jump_function (struct lto_input_block *ib,
3237 struct ipa_jump_func *jump_func,
3238 struct data_in *data_in)
3239 {
3240 struct bitpack_d bp;
3241 int i, count;
3242
3243 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3244 switch (jump_func->type)
3245 {
3246 case IPA_JF_UNKNOWN:
3247 break;
3248 case IPA_JF_KNOWN_TYPE:
3249 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3250 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3251 jump_func->value.known_type.component_type = stream_read_tree (ib,
3252 data_in);
3253 break;
3254 case IPA_JF_CONST:
3255 jump_func->value.constant = stream_read_tree (ib, data_in);
3256 break;
3257 case IPA_JF_PASS_THROUGH:
3258 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3259 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3260 jump_func->value.pass_through.operation
3261 = (enum tree_code) streamer_read_uhwi (ib);
3262 bp = streamer_read_bitpack (ib);
3263 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3264 break;
3265 case IPA_JF_ANCESTOR:
3266 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3267 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3268 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3269 bp = streamer_read_bitpack (ib);
3270 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3271 break;
3272 }
3273
3274 count = streamer_read_uhwi (ib);
3275 jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
3276 if (count)
3277 {
3278 bp = streamer_read_bitpack (ib);
3279 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3280 }
3281 for (i = 0; i < count; i++)
3282 {
3283 struct ipa_agg_jf_item item;
3284 item.offset = streamer_read_uhwi (ib);
3285 item.value = stream_read_tree (ib, data_in);
3286 VEC_quick_push (ipa_agg_jf_item_t, jump_func->agg.items, item);
3287 }
3288 }
3289
3290 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to
3291 CS that are relevant to indirect inlining.  */
3292
3293 static void
3294 ipa_write_indirect_edge_info (struct output_block *ob,
3295 struct cgraph_edge *cs)
3296 {
3297 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3298 struct bitpack_d bp;
3299
3300 streamer_write_hwi (ob, ii->param_index);
3301 streamer_write_hwi (ob, ii->offset);
3302 bp = bitpack_create (ob->main_stream);
3303 bp_pack_value (&bp, ii->polymorphic, 1);
3304 bp_pack_value (&bp, ii->agg_contents, 1);
3305 bp_pack_value (&bp, ii->by_ref, 1);
3306 streamer_write_bitpack (&bp);
3307
3308 if (ii->polymorphic)
3309 {
3310 streamer_write_hwi (ob, ii->otr_token);
3311 stream_write_tree (ob, ii->otr_type, true);
3312 }
3313 }
3314
3315 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to
3316 CS that are relevant to indirect inlining.  */
3317
3318 static void
3319 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3320 struct data_in *data_in ATTRIBUTE_UNUSED,
3321 struct cgraph_edge *cs)
3322 {
3323 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3324 struct bitpack_d bp;
3325
3326 ii->param_index = (int) streamer_read_hwi (ib);
3327 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3328 bp = streamer_read_bitpack (ib);
3329 ii->polymorphic = bp_unpack_value (&bp, 1);
3330 ii->agg_contents = bp_unpack_value (&bp, 1);
3331 ii->by_ref = bp_unpack_value (&bp, 1);
3332 if (ii->polymorphic)
3333 {
3334 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3335 ii->otr_type = stream_read_tree (ib, data_in);
3336 }
3337 }
3338
3339 /* Stream out NODE info to OB. */
3340
3341 static void
3342 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3343 {
3344 int node_ref;
3345 lto_symtab_encoder_t encoder;
3346 struct ipa_node_params *info = IPA_NODE_REF (node);
3347 int j;
3348 struct cgraph_edge *e;
3349 struct bitpack_d bp;
3350
3351 encoder = ob->decl_state->symtab_node_encoder;
3352 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3353 streamer_write_uhwi (ob, node_ref);
3354
3355 bp = bitpack_create (ob->main_stream);
3356 gcc_assert (info->uses_analysis_done
3357 || ipa_get_param_count (info) == 0);
3358 gcc_assert (!info->node_enqueued);
3359 gcc_assert (!info->ipcp_orig_node);
3360 for (j = 0; j < ipa_get_param_count (info); j++)
3361 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3362 streamer_write_bitpack (&bp);
3363 for (e = node->callees; e; e = e->next_callee)
3364 {
3365 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3366
3367 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3368 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3369 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3370 }
3371 for (e = node->indirect_calls; e; e = e->next_callee)
3372 {
3373 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3374
3375 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3376 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3377 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3378 ipa_write_indirect_edge_info (ob, e);
3379 }
3380 }
3381
3382 /* Stream in NODE info from IB. */
3383
3384 static void
3385 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3386 struct data_in *data_in)
3387 {
3388 struct ipa_node_params *info = IPA_NODE_REF (node);
3389 int k;
3390 struct cgraph_edge *e;
3391 struct bitpack_d bp;
3392
3393 ipa_initialize_node_params (node);
3394
3395 bp = streamer_read_bitpack (ib);
3396 if (ipa_get_param_count (info) != 0)
3397 info->uses_analysis_done = true;
3398 info->node_enqueued = false;
3399 for (k = 0; k < ipa_get_param_count (info); k++)
3400 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3401 for (e = node->callees; e; e = e->next_callee)
3402 {
3403 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3404 int count = streamer_read_uhwi (ib);
3405
3406 if (!count)
3407 continue;
3408 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
3409
3410 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3411 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3412 }
3413 for (e = node->indirect_calls; e; e = e->next_callee)
3414 {
3415 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3416 int count = streamer_read_uhwi (ib);
3417
3418 if (count)
3419 {
3420 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
3421 count);
3422 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3423 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3424 data_in);
3425 }
3426 ipa_read_indirect_edge_info (ib, data_in, e);
3427 }
3428 }
3429
3430 /* Write jump functions for all analyzed nodes in the current partition.  */
3431
3432 void
3433 ipa_prop_write_jump_functions (void)
3434 {
3435 struct cgraph_node *node;
3436 struct output_block *ob;
3437 unsigned int count = 0;
3438 lto_symtab_encoder_iterator lsei;
3439 lto_symtab_encoder_t encoder;
3440
3441
3442 if (!ipa_node_params_vector)
3443 return;
3444
3445 ob = create_output_block (LTO_section_jump_functions);
3446 encoder = ob->decl_state->symtab_node_encoder;
3447 ob->cgraph_node = NULL;
3448 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3449 lsei_next_function_in_partition (&lsei))
3450 {
3451 node = lsei_cgraph_node (lsei);
3452 if (cgraph_function_with_gimple_body_p (node)
3453 && IPA_NODE_REF (node) != NULL)
3454 count++;
3455 }
3456
3457 streamer_write_uhwi (ob, count);
3458
3459 /* Process all of the functions. */
3460 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3461 lsei_next_function_in_partition (&lsei))
3462 {
3463 node = lsei_cgraph_node (lsei);
3464 if (cgraph_function_with_gimple_body_p (node)
3465 && IPA_NODE_REF (node) != NULL)
3466 ipa_write_node_info (ob, node);
3467 }
3468 streamer_write_char_stream (ob->main_stream, 0);
3469 produce_asm (ob, NULL);
3470 destroy_output_block (ob);
3471 }
3472
3473 /* Read the jump-function section in file FILE_DATA of length LEN with data DATA.  */
3474
3475 static void
3476 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3477 size_t len)
3478 {
3479 const struct lto_function_header *header =
3480 (const struct lto_function_header *) data;
3481 const int cfg_offset = sizeof (struct lto_function_header);
3482 const int main_offset = cfg_offset + header->cfg_size;
3483 const int string_offset = main_offset + header->main_size;
3484 struct data_in *data_in;
3485 struct lto_input_block ib_main;
3486 unsigned int i;
3487 unsigned int count;
3488
3489 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3490 header->main_size);
3491
3492 data_in =
3493 lto_data_in_create (file_data, (const char *) data + string_offset,
3494 header->string_size, NULL);
3495 count = streamer_read_uhwi (&ib_main);
3496
3497 for (i = 0; i < count; i++)
3498 {
3499 unsigned int index;
3500 struct cgraph_node *node;
3501 lto_symtab_encoder_t encoder;
3502
3503 index = streamer_read_uhwi (&ib_main);
3504 encoder = file_data->symtab_node_encoder;
3505 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3506 gcc_assert (node->analyzed);
3507 ipa_read_node_info (&ib_main, node, data_in);
3508 }
3509 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3510 len);
3511 lto_data_in_delete (data_in);
3512 }
3513
3514 /* Read ipcp jump functions. */
3515
3516 void
3517 ipa_prop_read_jump_functions (void)
3518 {
3519 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3520 struct lto_file_decl_data *file_data;
3521 unsigned int j = 0;
3522
3523 ipa_check_create_node_params ();
3524 ipa_check_create_edge_args ();
3525 ipa_register_cgraph_hooks ();
3526
3527 while ((file_data = file_data_vec[j++]))
3528 {
3529 size_t len;
3530 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3531
3532 if (data)
3533 ipa_prop_read_section (file_data, data, len);
3534 }
3535 }
3536
3537 /* After merging units, we can get a mismatch in argument counts.
3538 Also, decl merging might've rendered parameter lists obsolete.
3539 Also compute called_with_variable_arg info. */
3540
3541 void
3542 ipa_update_after_lto_read (void)
3543 {
3544 struct cgraph_node *node;
3545
3546 ipa_check_create_node_params ();
3547 ipa_check_create_edge_args ();
3548
3549 FOR_EACH_DEFINED_FUNCTION (node)
3550 if (node->analyzed)
3551 ipa_initialize_node_params (node);
3552 }