/* Interprocedural analyses.
   Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "gimple.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the edge argument infos are actually stored.  */
VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  int i, count;

  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    if (ipa_get_param (info, i) == ptree)
      return i;

  return -1;
}

/* Populate the param_decl field in parameter descriptors of INFO that
   corresponds to NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          struct ipa_node_params *info)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      VEC_index (ipa_param_descriptor_t,
                 info->descriptors, param_num).decl = parm;
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors)
    {
      int param_count;

      param_count = count_formal_params (node->symbol.decl);
      if (param_count)
        {
          VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
                                 info->descriptors, param_count);
          ipa_populate_param_decls (node, info);
        }
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   tree_code_name[(int)
                                  jump_func->value.pass_through.operation]);
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
                            j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_low_cst (TYPE_SIZE (item->value), 1));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  int i;

  fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (cgraph_node_name (node)), node->uid,
               xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      if (cs->call_stmt)
        {
          fprintf (f, " indirect callsite %d for stmt ", i);
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, " indirect callsite %d :\n", i);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant = constant;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = operand;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where the virtual method pointer we are looking
     for resides.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
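
/* For illustration, a sketch of the constructor shape the above assumes
   (hypothetical C++, not part of this file):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };
     B::B () { f (); }

   The compiled B::B first calls A::A (section 1), then stores the address of
   B's virtual table into the VMT pointer (section 2), and only then runs the
   user code calling f () (section 3), so walking backwards from the call hits
   the VMT store before any ancestor constructor call.  */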

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* Like detect_type_change but with extra argument COMP_TYPE which will become
   the component type part of the new JFUNC if a dynamic type change is
   detected and the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
                      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
   looking for assignments to its virtual table pointer.  If it has, return
   true and fill in the jump function JFUNC with relevant type information or
   set it to unknown.  ARG is the object itself (not a pointer to it, unless
   dereferenced).  BASE is the base of the memory access as returned by
   get_ref_base_and_extent, as is the offset.  */
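
/* For example (hypothetical C++, for illustration only), given

     A a;
     new (&a) B;
     a.foo ();

   the constructor invoked by the placement new stores B's virtual table
   pointer into a between its initialization and the call, and it is exactly
   such stores to the VMT pointer that the walk over virtual definitions
   detects.  */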

static bool
detect_type_change (tree arg, tree base, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  comp_type = TREE_TYPE (TREE_TYPE (arg));
  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration
   that has not been modified, return the index of the parameter in
   ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_node_params *info,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index (info, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   the parameter.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */
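
/* As a hedged illustration, for a function like

     int foo (struct S *p) { return p->f; }

   the load p->f would yield the index of parameter p in *INDEX_P, the bit
   offset of field f within struct S in *OFFSET_P, and true in *BY_REF_P.  */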

static bool
ipa_load_from_parm_agg_1 (struct ipa_node_params *info,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index (info, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index (info, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (info, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      return true;
    }
  return false;
}

/* Just like the previous function but without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info, NULL, stmt, op, index_p, offset_p,
                                   by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

   foo (int a)
   {
     int a.0;

     a.0_2 = a;
     bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

   foo (int a)
   {
     int D.2064;

     D.2064_4 = a.1(D) + 4;
     bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

   foo (int a, int z)
   {
     int a.0;
     int D.2064;

     a.0_3 = a;
     D.2064_4 = a.0_3 + 4;
     foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

   B::foo() (struct B * const this)
   {
     struct A * D.1845;

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   INFO is the structure describing individual parameters, accessed at
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt)
               && !detect_type_change_ssa (tc_ssa, call, jfunc))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm));
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
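
/* Schematically, the layout being matched is (illustrative only):

     struct
     {
       void (T::*__pfn) ();   pointer to method or vtable index
       ptrdiff_t __delta;     adjustment of the this pointer
     };  */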

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */
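
/* E.g. (illustrative GIMPLE), given

     b_2 = a_1;
     c_3 = b_2;

   calling this function on c_3 follows the copies and returns a_1.  */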

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */
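
/* As an illustration (hypothetical GIMPLE, assuming a layout where f sits
   32 bits after i), for a call preceded by

     s.i = 1;
     s.f = 2.0e+0;
     foo (&s);

   the jump function for the argument would record the items
   {offset 0: 1, offset 32: 2.0e+0} as known aggregate contents.  */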

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_low_cst (type_size, 1);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        break;

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              item.value = list->constant;
              VEC_quick_push (ipa_agg_jf_item_t, jfunc->agg.items, item);
            }
          list = list->next;
        }
    }
}

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             loop.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0
                  && !detect_type_change_ssa (arg, call, jfunc))
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (TREE_TYPE (arg))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->analyzed && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */
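
/* Schematically, the two matched shapes are (illustrative; see also the
   pattern in the comment before ipa_analyze_indirect_call_uses):

     f$__pfn_24 = f.__pfn;
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   where f is a PARM_DECL whose type looks like a member pointer.  */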

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

   <bb 2>:
   f$__delta_5 = f.__delta;
   f$__pfn_24 = f.__pfn;

   or
   <bb 2>:
   f$__delta_5 = MEM[(struct *)&f];
   f$__pfn_24 = MEM[(struct *)&f + 4B];

   and a few lines below:

   <bb 5>
   D.2496_3 = (int) f$__pfn_24;
   D.2497_4 = D.2496_3 & 1;
   if (D.2497_4 != 0)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 6>:
   D.2500_7 = (unsigned int) f$__delta_5;
   D.2501_8 = &S + D.2500_7;
   D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
   D.2503_10 = *D.2502_9;
   D.2504_12 = f$__pfn_24 + -1;
   D.2505_13 = (unsigned int) D.2504_12;
   D.2506_14 = D.2503_10 + D.2505_13;
   D.2507_15 = *D.2506_14;
   iftmp.11_16 = (String:: *) D.2507_15;

   <bb 7>:
   # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
   D.2500_19 = (unsigned int) f$__delta_5;
   D.2508_20 = &S + D.2500_19;
   D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

   int doprinting (int (MyString::* f)(int) const)
   {
     MyString S ("somestring");

     return (S.*f)(4);
   }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
                                struct ipa_node_params *info,
                                struct param_analysis_info *parms_ainfo,
                                gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (node, index, call);
      return;
    }

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info, parms_ainfo, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
    }
}

/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
                               struct ipa_node_params *info, gimple call,
                               tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
        return;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
        return;
    }

  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}

/* Analyze a call statement CALL to determine whether and how it utilizes
   formal parameters of the caller (described by INFO).  PARMS_AINFO is a
   pointer to a vector containing intermediate information about each formal
   parameter.  */

static void
ipa_analyze_call_uses (struct cgraph_node *node,
                       struct ipa_node_params *info,
                       struct param_analysis_info *parms_ainfo, gimple call)
{
  tree target = gimple_call_fn (call);

  if (!target)
    return;
  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
  else if (TREE_CODE (target) == OBJ_TYPE_REF)
    ipa_analyze_virtual_call_uses (node, info, call, target);
}

/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by NODE.  Currently it only checks whether formal
   parameters are called.  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  */

static void
ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
                       struct param_analysis_info *parms_ainfo, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
}
1823
1824 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
1825 taken addresses alike. If OP is a parameter declaration, mark it as used
1826 in the info structure passed in DATA. */
1827
1828 static bool
1829 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1830 tree op, void *data)
1831 {
1832 struct ipa_node_params *info = (struct ipa_node_params *) data;
1833
1834 op = get_base_address (op);
1835 if (op
1836 && TREE_CODE (op) == PARM_DECL)
1837 {
1838 int index = ipa_get_param_decl_index (info, op);
1839 gcc_assert (index >= 0);
1840 ipa_set_param_used (info, index, true);
1841 }
1842
1843 return false;
1844 }
1845
1846 /* Scan the function body of NODE and inspect the uses of formal parameters.
1847 Store the findings in various structures of the associated ipa_node_params
1848 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1849 vector containing intermediate information about each formal parameter. */
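
/* As a purely illustrative example, in

     int f (int a, int b) { return a; }

   only the first parameter ends up with its used flag set: the default
   definition SSA name of B has zero uses, so the loop below leaves B
   marked as unused.  */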
1850
1851 static void
1852 ipa_analyze_params_uses (struct cgraph_node *node,
1853 struct param_analysis_info *parms_ainfo)
1854 {
1855 tree decl = node->symbol.decl;
1856 basic_block bb;
1857 struct function *func;
1858 gimple_stmt_iterator gsi;
1859 struct ipa_node_params *info = IPA_NODE_REF (node);
1860 int i;
1861
1862 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1863 return;
1864
1865 for (i = 0; i < ipa_get_param_count (info); i++)
1866 {
1867 tree parm = ipa_get_param (info, i);
1868 tree ddef;
1869 /* For SSA regs see if the parameter is used. For non-SSA parameters
1870 we compute the flag during the modification analysis below. */
1871 if (is_gimple_reg (parm)
1872 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1873 parm)) != NULL_TREE
1874 && !has_zero_uses (ddef))
1875 ipa_set_param_used (info, i, true);
1876 }
1877
1878 func = DECL_STRUCT_FUNCTION (decl);
1879 FOR_EACH_BB_FN (bb, func)
1880 {
1881 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1882 {
1883 gimple stmt = gsi_stmt (gsi);
1884
1885 if (is_gimple_debug (stmt))
1886 continue;
1887
1888 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1889 walk_stmt_load_store_addr_ops (stmt, info,
1890 visit_ref_for_mod_analysis,
1891 visit_ref_for_mod_analysis,
1892 visit_ref_for_mod_analysis);
1893 }
1894 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1895 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1896 visit_ref_for_mod_analysis,
1897 visit_ref_for_mod_analysis,
1898 visit_ref_for_mod_analysis);
1899 }
1900
1901 info->uses_analysis_done = 1;
1902 }
1903
1904 /* Initialize the array describing properties of formal parameters
1905 of NODE, analyze their uses and compute jump functions associated
1906 with actual arguments of calls from within NODE. */
1907
1908 void
1909 ipa_analyze_node (struct cgraph_node *node)
1910 {
1911 struct ipa_node_params *info;
1912 struct param_analysis_info *parms_ainfo;
1913 int i, param_count;
1914
1915 ipa_check_create_node_params ();
1916 ipa_check_create_edge_args ();
1917 info = IPA_NODE_REF (node);
1918 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1919 current_function_decl = node->symbol.decl;
1920 ipa_initialize_node_params (node);
1921
1922 param_count = ipa_get_param_count (info);
1923 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1924 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1925
1926 ipa_analyze_params_uses (node, parms_ainfo);
1927 ipa_compute_jump_functions (node, parms_ainfo);
1928
1929 for (i = 0; i < param_count; i++)
1930 {
1931 if (parms_ainfo[i].parm_visited_statements)
1932 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1933 if (parms_ainfo[i].pt_visited_statements)
1934 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1935 }
1936
1937 current_function_decl = NULL;
1938 pop_cfun ();
1939 }
1940
1941
1942 /* Update the jump function DST when the call graph edge corresponding to SRC
1943 is being inlined, knowing that DST is of type ancestor and SRC of known
1944 type. */
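
/* A sketch of the arithmetic below, with made-up numbers: if SRC says the
   argument is a known object at offset 32 (in bits) within its containing
   object, and DST describes an ancestor at offset 64 within its operand,
   the result is a known-type jump function at offset 32 + 64 == 96 that
   keeps the base type of SRC and takes the ancestor type of DST.  */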
1945
1946 static void
1947 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1948 struct ipa_jump_func *dst)
1949 {
1950 HOST_WIDE_INT combined_offset;
1951 tree combined_type;
1952
1953 combined_offset = ipa_get_jf_known_type_offset (src)
1954 + ipa_get_jf_ancestor_offset (dst);
1955 combined_type = ipa_get_jf_ancestor_type (dst);
1956
1957 ipa_set_jf_known_type (dst, combined_offset,
1958 ipa_get_jf_known_type_base_type (src),
1959 combined_type);
1960 }
1961
1962 /* Update the jump functions associated with call graph edge E when the call
1963 graph edge CS is being inlined, assuming that E->caller is already (possibly
1964 indirectly) inlined into CS->callee and that E has not been inlined. */
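
/* An illustrative scenario with made-up indices: suppose an argument of E
   is a simple pass-through of formal #2 of E->caller, and the jump
   function of argument #2 of CS is itself a pass-through of formal #0 of
   CS->caller.  After CS is inlined, that argument of E is formal #0 of
   CS->caller, which is the composition the loop below performs.  */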
1965
1966 static void
1967 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1968 struct cgraph_edge *e)
1969 {
1970 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1971 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1972 int count = ipa_get_cs_argument_count (args);
1973 int i;
1974
1975 for (i = 0; i < count; i++)
1976 {
1977 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1978
1979 if (dst->type == IPA_JF_ANCESTOR)
1980 {
1981 struct ipa_jump_func *src;
1982 int dst_fid = dst->value.ancestor.formal_id;
1983
1984 /* Variable number of arguments can cause havoc if we try to access
1985 one that does not exist in the inlined edge. So make sure we
1986 don't. */
1987 if (dst_fid >= ipa_get_cs_argument_count (top))
1988 {
1989 dst->type = IPA_JF_UNKNOWN;
1990 continue;
1991 }
1992
1993 src = ipa_get_ith_jump_func (top, dst_fid);
1994
1995 if (src->agg.items
1996 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
1997 {
1998 struct ipa_agg_jf_item *item;
1999 int j;
2000
2001 /* Currently we do not produce clobber aggregate jump functions;
2002 replace this with merging when we do. */
2003 gcc_assert (!dst->agg.items);
2004
2005 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
2006 dst->agg.by_ref = src->agg.by_ref;
2007 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
2008 item->offset -= dst->value.ancestor.offset;
2009 }
2010
2011 if (src->type == IPA_JF_KNOWN_TYPE)
2012 combine_known_type_and_ancestor_jfs (src, dst);
2013 else if (src->type == IPA_JF_PASS_THROUGH
2014 && src->value.pass_through.operation == NOP_EXPR)
2015 {
2016 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2017 dst->value.ancestor.agg_preserved &=
2018 src->value.pass_through.agg_preserved;
2019 }
2020 else if (src->type == IPA_JF_ANCESTOR)
2021 {
2022 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2023 dst->value.ancestor.offset += src->value.ancestor.offset;
2024 dst->value.ancestor.agg_preserved &=
2025 src->value.ancestor.agg_preserved;
2026 }
2027 else
2028 dst->type = IPA_JF_UNKNOWN;
2029 }
2030 else if (dst->type == IPA_JF_PASS_THROUGH)
2031 {
2032 struct ipa_jump_func *src;
2033 /* We must check range due to calls with variable number of arguments
2034 and we cannot combine jump functions with operations. */
2035 if (dst->value.pass_through.operation == NOP_EXPR
2036 && (dst->value.pass_through.formal_id
2037 < ipa_get_cs_argument_count (top)))
2038 {
2039 bool agg_p;
2040 int dst_fid = dst->value.pass_through.formal_id;
2041 src = ipa_get_ith_jump_func (top, dst_fid);
2042 agg_p = dst->value.pass_through.agg_preserved;
2043
2044 dst->type = src->type;
2045 dst->value = src->value;
2046
2047 if (src->agg.items
2048 && (agg_p || !src->agg.by_ref))
2049 {
2050 /* Currently we do not produce clobber aggregate jump
2051 functions; replace this with merging when we do. */
2052 gcc_assert (!dst->agg.items);
2053
2054 dst->agg.by_ref = src->agg.by_ref;
2055 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
2056 src->agg.items);
2057 }
2058
2059 if (!agg_p)
2060 {
2061 if (dst->type == IPA_JF_PASS_THROUGH)
2062 dst->value.pass_through.agg_preserved = false;
2063 else if (dst->type == IPA_JF_ANCESTOR)
2064 dst->value.ancestor.agg_preserved = false;
2065 }
2066 }
2067 else
2068 dst->type = IPA_JF_UNKNOWN;
2069 }
2070 }
2071 }
2072
2073 /* If TARGET is an addr_expr of a function declaration, make it the destination
2074 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2075
2076 struct cgraph_edge *
2077 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2078 {
2079 struct cgraph_node *callee;
2080
2081 if (TREE_CODE (target) == ADDR_EXPR)
2082 target = TREE_OPERAND (target, 0);
2083 if (TREE_CODE (target) != FUNCTION_DECL)
2084 return NULL;
2085 callee = cgraph_get_node (target);
2086 if (!callee)
2087 return NULL;
2088 ipa_check_create_node_params ();
2089
2090 /* We cannot make edges to inline clones. It is a bug if someone removed
2091 the cgraph node too early. */
2092 gcc_assert (!callee->global.inlined_to);
2093
2094 cgraph_make_edge_direct (ie, callee);
2095 if (dump_file)
2096 {
2097 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2098 "(%s/%i -> %s/%i), for stmt ",
2099 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2100 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2101 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2102 if (ie->call_stmt)
2103 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2104 else
2105 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2106 }
2107 callee = cgraph_function_or_thunk_node (callee, NULL);
2108
2109 return ie;
2110 }
2111
2112 /* Retrieve the value from aggregate jump function AGG for the given OFFSET,
2113 or return NULL if there is none. BY_REF specifies whether the value is
2114 expected to be passed by reference or by value. */
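
/* A minimal usage sketch with hypothetical values: for an AGG that is
   passed by value and whose item list is { offset 0, value 1 },
   { offset 32, value 2 }, the call

     ipa_find_agg_cst_for_param (agg, 32, false)

   returns the constant 2, whereas asking for offset 16, or passing true
   for by_ref, returns NULL.  */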
2115
2116 tree
2117 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2118 HOST_WIDE_INT offset, bool by_ref)
2119 {
2120 struct ipa_agg_jf_item *item;
2121 int i;
2122
2123 if (by_ref != agg->by_ref)
2124 return NULL;
2125
2126 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2127 {
2128 if (item->offset == offset)
2129 {
2130 /* Currently we do not have clobber values; return NULL for them once
2131 we do. */
2132 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2133 return item->value;
2134 }
2135 else if (item->offset > offset)
2136 return NULL;
2137 }
2138 return NULL;
2139 }
2140
2141 /* Try to find a destination for indirect edge IE that corresponds to a simple
2142 call or a call of a member function pointer, where the call target is passed
2143 in a pointer formal parameter described by jump function JFUNC. If it can be
2144 determined, return the newly direct edge, otherwise return NULL. */
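
/* For example (hypothetical code): once a wrapper like

     static void dispatch (void (*fn) (void)) { fn (); }

   is inlined into a caller invoking dispatch (do_work), the jump function
   describing the argument is an IPA_JF_CONST holding the address of
   do_work, and the indirect edge inside the wrapper body can be turned
   into a direct call to do_work.  */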
2145
2146 static struct cgraph_edge *
2147 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2148 struct ipa_jump_func *jfunc)
2149 {
2150 tree target;
2151
2152 if (ie->indirect_info->agg_contents)
2153 {
2154 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2155 ie->indirect_info->offset,
2156 ie->indirect_info->by_ref);
2157 if (!target)
2158 return NULL;
2159 }
2160 else
2161 {
2162 if (jfunc->type != IPA_JF_CONST)
2163 return NULL;
2164 target = ipa_get_jf_constant (jfunc);
2165 }
2166 return ipa_make_edge_direct_to_target (ie, target);
2167 }
2168
2169 /* Try to find a destination for indirect edge IE that corresponds to a
2170 virtual call based on a formal parameter which is described by jump
2171 function JFUNC and if it can be determined, make it direct and return the
2172 direct edge. Otherwise, return NULL. */
2173
2174 static struct cgraph_edge *
2175 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2176 struct ipa_jump_func *jfunc)
2177 {
2178 tree binfo, target;
2179
2180 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2181 return NULL;
2182
2183 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2184 gcc_checking_assert (binfo);
2185 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2186 + ie->indirect_info->offset,
2187 ie->indirect_info->otr_type);
2188 if (binfo)
2189 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2190 binfo);
2191 else
2192 return NULL;
2193
2194 if (target)
2195 return ipa_make_edge_direct_to_target (ie, target);
2196 else
2197 return NULL;
2198 }
2199
2200 /* Update the indirect call notes associated with NODE when CS is being inlined,
2201 assuming NODE is (potentially indirectly) inlined into CS->callee.
2202 Moreover, if the callee is discovered to be constant, create a new cgraph
2203 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2204 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
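
/* For instance, with made-up indices: if an indirect call in NODE
   dispatches on NODE's formal #1, and the jump function computed for the
   corresponding argument of CS is a simple pass-through of formal #3 of
   the new caller, the code below rewrites param_index from 1 to 3 so that
   the indirect edge keeps tracking the right value.  */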
2205
2206 static bool
2207 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2208 struct cgraph_node *node,
2209 VEC (cgraph_edge_p, heap) **new_edges)
2210 {
2211 struct ipa_edge_args *top;
2212 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2213 bool res = false;
2214
2215 ipa_check_create_edge_args ();
2216 top = IPA_EDGE_REF (cs);
2217
2218 for (ie = node->indirect_calls; ie; ie = next_ie)
2219 {
2220 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2221 struct ipa_jump_func *jfunc;
2222 int param_index;
2223
2224 next_ie = ie->next_callee;
2225
2226 if (ici->param_index == -1)
2227 continue;
2228
2229 /* We must check range due to calls with variable number of arguments: */
2230 if (ici->param_index >= ipa_get_cs_argument_count (top))
2231 {
2232 ici->param_index = -1;
2233 continue;
2234 }
2235
2236 param_index = ici->param_index;
2237 jfunc = ipa_get_ith_jump_func (top, param_index);
2238 if (jfunc->type == IPA_JF_PASS_THROUGH
2239 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2240 {
2241 if (ici->agg_contents
2242 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2243 ici->param_index = -1;
2244 else
2245 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2246 }
2247 else if (jfunc->type == IPA_JF_ANCESTOR)
2248 {
2249 if (ici->agg_contents
2250 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2251 ici->param_index = -1;
2252 else
2253 {
2254 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2255 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2256 }
2257 }
2258 else
2259 /* Either we can find a destination for this edge now or never. */
2260 ici->param_index = -1;
2261
2262 if (!flag_indirect_inlining)
2263 continue;
2264
2265 if (ici->polymorphic)
2266 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2267 else
2268 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2269
2270 if (new_direct_edge)
2271 {
2272 new_direct_edge->indirect_inlining_edge = 1;
2273 if (new_direct_edge->call_stmt)
2274 new_direct_edge->call_stmt_cannot_inline_p
2275 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2276 new_direct_edge->callee->symbol.decl);
2277 if (new_edges)
2278 {
2279 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
2280 new_direct_edge);
2281 top = IPA_EDGE_REF (cs);
2282 res = true;
2283 }
2284 }
2285 }
2286
2287 return res;
2288 }
2289
2290 /* Recursively traverse subtree of NODE (including node) made of inlined
2291 cgraph_edges when CS has been inlined and invoke
2292 update_indirect_edges_after_inlining on all nodes and
2293 update_jump_functions_after_inlining on all non-inlined edges that lead out
2294 of this subtree. Newly discovered indirect edges will be added to
2295 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2296 created. */
2297
2298 static bool
2299 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2300 struct cgraph_node *node,
2301 VEC (cgraph_edge_p, heap) **new_edges)
2302 {
2303 struct cgraph_edge *e;
2304 bool res;
2305
2306 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2307
2308 for (e = node->callees; e; e = e->next_callee)
2309 if (!e->inline_failed)
2310 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2311 else
2312 update_jump_functions_after_inlining (cs, e);
2313 for (e = node->indirect_calls; e; e = e->next_callee)
2314 update_jump_functions_after_inlining (cs, e);
2315
2316 return res;
2317 }
2318
2319 /* Update jump functions and call note functions on inlining the call site CS.
2320 CS is expected to lead to a node already cloned by
2321 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2322 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2323 created. */
2324
2325 bool
2326 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2327 VEC (cgraph_edge_p, heap) **new_edges)
2328 {
2329 bool changed;
2330 /* Do nothing if the preparation phase has not been carried out yet
2331 (i.e. during early inlining). */
2332 if (!ipa_node_params_vector)
2333 return false;
2334 gcc_assert (ipa_edge_args_vector);
2335
2336 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2337
2338 /* We do not keep jump functions of inlined edges up to date. Better to free
2339 them so we do not access them accidentally. */
2340 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2341 return changed;
2342 }
2343
2344 /* Frees all dynamically allocated structures that the argument info points
2345 to. */
2346
2347 void
2348 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2349 {
2350 if (args->jump_functions)
2351 ggc_free (args->jump_functions);
2352
2353 memset (args, 0, sizeof (*args));
2354 }
2355
2356 /* Free all ipa_edge_args structures. */
2357
2358 void
2359 ipa_free_all_edge_args (void)
2360 {
2361 int i;
2362 struct ipa_edge_args *args;
2363
2364 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2365 ipa_free_edge_args_substructures (args);
2366
2367 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2368 ipa_edge_args_vector = NULL;
2369 }
2370
2371 /* Frees all dynamically allocated structures that the param info points
2372 to. */
2373
2374 void
2375 ipa_free_node_params_substructures (struct ipa_node_params *info)
2376 {
2377 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2378 free (info->lattices);
2379 /* Lattice values and their sources are deallocated with their allocation
2380 pool. */
2381 VEC_free (tree, heap, info->known_vals);
2382 memset (info, 0, sizeof (*info));
2383 }
2384
2385 /* Free all ipa_node_params structures. */
2386
2387 void
2388 ipa_free_all_node_params (void)
2389 {
2390 int i;
2391 struct ipa_node_params *info;
2392
2393 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2394 ipa_free_node_params_substructures (info);
2395
2396 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2397 ipa_node_params_vector = NULL;
2398 }
2399
2400 /* Hook that is called by cgraph.c when an edge is removed. */
2401
2402 static void
2403 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2404 {
2405 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2406 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2407 <= (unsigned)cs->uid)
2408 return;
2409 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2410 }
2411
2412 /* Hook that is called by cgraph.c when a node is removed. */
2413
2414 static void
2415 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2416 {
2417 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2418 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2419 <= (unsigned)node->uid)
2420 return;
2421 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2422 }
2423
2424 /* Hook that is called by cgraph.c when an edge is duplicated. */
2425
2426 static void
2427 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2428 void *data ATTRIBUTE_UNUSED)
2429 {
2430 struct ipa_edge_args *old_args, *new_args;
2431 unsigned int i;
2432
2433 ipa_check_create_edge_args ();
2434
2435 old_args = IPA_EDGE_REF (src);
2436 new_args = IPA_EDGE_REF (dst);
2437
2438 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2439 old_args->jump_functions);
2440
2441 for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
2442 VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
2443 = VEC_copy (ipa_agg_jf_item_t, gc,
2444 VEC_index (ipa_jump_func_t,
2445 old_args->jump_functions, i).agg.items);
2446 }
2447
2448 /* Hook that is called by cgraph.c when a node is duplicated. */
2449
2450 static void
2451 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2452 void *data ATTRIBUTE_UNUSED)
2453 {
2454 struct ipa_node_params *old_info, *new_info;
2455
2456 ipa_check_create_node_params ();
2457 old_info = IPA_NODE_REF (src);
2458 new_info = IPA_NODE_REF (dst);
2459
2460 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2461 old_info->descriptors);
2462 new_info->lattices = NULL;
2463 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2464
2465 new_info->uses_analysis_done = old_info->uses_analysis_done;
2466 new_info->node_enqueued = old_info->node_enqueued;
2467 }
2468
2469
2470 /* Analyze a function newly added to the callgraph. */
2471
2472 static void
2473 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2474 {
2475 ipa_analyze_node (node);
2476 }
2477
2478 /* Register our cgraph hooks if they are not already there. */
2479
2480 void
2481 ipa_register_cgraph_hooks (void)
2482 {
2483 if (!edge_removal_hook_holder)
2484 edge_removal_hook_holder =
2485 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2486 if (!node_removal_hook_holder)
2487 node_removal_hook_holder =
2488 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2489 if (!edge_duplication_hook_holder)
2490 edge_duplication_hook_holder =
2491 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2492 if (!node_duplication_hook_holder)
2493 node_duplication_hook_holder =
2494 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2495 function_insertion_hook_holder =
2496 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2497 }
2498
2499 /* Unregister our cgraph hooks. */
2500
2501 static void
2502 ipa_unregister_cgraph_hooks (void)
2503 {
2504 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2505 edge_removal_hook_holder = NULL;
2506 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2507 node_removal_hook_holder = NULL;
2508 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2509 edge_duplication_hook_holder = NULL;
2510 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2511 node_duplication_hook_holder = NULL;
2512 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2513 function_insertion_hook_holder = NULL;
2514 }
2515
2516 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2517 longer needed after ipa-cp. */
2518
2519 void
2520 ipa_free_all_structures_after_ipa_cp (void)
2521 {
2522 if (!optimize)
2523 {
2524 ipa_free_all_edge_args ();
2525 ipa_free_all_node_params ();
2526 free_alloc_pool (ipcp_sources_pool);
2527 free_alloc_pool (ipcp_values_pool);
2528 ipa_unregister_cgraph_hooks ();
2529 }
2530 }
2531
2532 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2533 longer needed after indirect inlining. */
2534
2535 void
2536 ipa_free_all_structures_after_iinln (void)
2537 {
2538 ipa_free_all_edge_args ();
2539 ipa_free_all_node_params ();
2540 ipa_unregister_cgraph_hooks ();
2541 if (ipcp_sources_pool)
2542 free_alloc_pool (ipcp_sources_pool);
2543 if (ipcp_values_pool)
2544 free_alloc_pool (ipcp_values_pool);
2545 }
2546
2547 /* Print the parameter descriptors of function NODE to file F. */
2549
2550 void
2551 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2552 {
2553 int i, count;
2554 tree temp;
2555 struct ipa_node_params *info;
2556
2557 if (!node->analyzed)
2558 return;
2559 info = IPA_NODE_REF (node);
2560 fprintf (f, " function %s parameter descriptors:\n",
2561 cgraph_node_name (node));
2562 count = ipa_get_param_count (info);
2563 for (i = 0; i < count; i++)
2564 {
2565 temp = ipa_get_param (info, i);
2566 if (TREE_CODE (temp) == PARM_DECL)
2567 fprintf (f, " param %d : %s", i,
2568 (DECL_NAME (temp)
2569 ? (*lang_hooks.decl_printable_name) (temp, 2)
2570 : "(unnamed)"));
2571 if (ipa_is_param_used (info, i))
2572 fprintf (f, " used");
2573 fprintf (f, "\n");
2574 }
2575 }
2576
2577 /* Print the parameter descriptors of all functions in the
2578 callgraph to F. */
2579
2580 void
2581 ipa_print_all_params (FILE * f)
2582 {
2583 struct cgraph_node *node;
2584
2585 fprintf (f, "\nFunction parameters:\n");
2586 FOR_EACH_FUNCTION (node)
2587 ipa_print_node_params (f, node);
2588 }
2589
2590 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2591
2592 VEC(tree, heap) *
2593 ipa_get_vector_of_formal_parms (tree fndecl)
2594 {
2595 VEC(tree, heap) *args;
2596 int count;
2597 tree parm;
2598
2599 count = count_formal_params (fndecl);
2600 args = VEC_alloc (tree, heap, count);
2601 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2602 VEC_quick_push (tree, args, parm);
2603
2604 return args;
2605 }
2606
2607 /* Return a heap allocated vector containing types of formal parameters of
2608 function type FNTYPE. */
2609
2610 static inline VEC(tree, heap) *
2611 get_vector_of_formal_parm_types (tree fntype)
2612 {
2613 VEC(tree, heap) *types;
2614 int count = 0;
2615 tree t;
2616
2617 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2618 count++;
2619
2620 types = VEC_alloc (tree, heap, count);
2621 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2622 VEC_quick_push (tree, types, TREE_VALUE (t));
2623
2624 return types;
2625 }
2626
2627 /* Modify the function declaration FNDECL and its type according to the plan in
2628 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2629 to reflect the actual parameters being modified which are determined by the
2630 base_index field. */
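
/* An illustrative sketch with hypothetical adjustments: given

     int f (struct S *s, int unused);

   a vector whose first entry replaces parameter 0 with an int piece of *S
   passed by value, and whose second entry removes parameter 1, rewrites
   the declaration to roughly

     int f (int SYNTH.3);

   where the new parameter is named after SYNTH_PARM_PREFIX.  */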
2631
2632 void
2633 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2634 const char *synth_parm_prefix)
2635 {
2636 VEC(tree, heap) *oparms, *otypes;
2637 tree orig_type, new_type = NULL;
2638 tree old_arg_types, t, new_arg_types = NULL;
2639 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2640 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2641 tree new_reversed = NULL;
2642 bool care_for_types, last_parm_void;
2643
2644 if (!synth_parm_prefix)
2645 synth_parm_prefix = "SYNTH";
2646
2647 oparms = ipa_get_vector_of_formal_parms (fndecl);
2648 orig_type = TREE_TYPE (fndecl);
2649 old_arg_types = TYPE_ARG_TYPES (orig_type);
2650
2651 /* The following test is an ugly hack; some functions simply don't have any
2652 arguments in their type. This is probably a bug but well... */
2653 care_for_types = (old_arg_types != NULL_TREE);
2654 if (care_for_types)
2655 {
2656 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2657 == void_type_node);
2658 otypes = get_vector_of_formal_parm_types (orig_type);
2659 if (last_parm_void)
2660 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2661 else
2662 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2663 }
2664 else
2665 {
2666 last_parm_void = false;
2667 otypes = NULL;
2668 }
2669
2670 for (i = 0; i < len; i++)
2671 {
2672 struct ipa_parm_adjustment *adj;
2673 gcc_assert (link);
2674
2675 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2676 parm = VEC_index (tree, oparms, adj->base_index);
2677 adj->base = parm;
2678
2679 if (adj->copy_param)
2680 {
2681 if (care_for_types)
2682 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2683 adj->base_index),
2684 new_arg_types);
2685 *link = parm;
2686 link = &DECL_CHAIN (parm);
2687 }
2688 else if (!adj->remove_param)
2689 {
2690 tree new_parm;
2691 tree ptype;
2692
2693 if (adj->by_ref)
2694 ptype = build_pointer_type (adj->type);
2695 else
2696 ptype = adj->type;
2697
2698 if (care_for_types)
2699 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2700
2701 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2702 ptype);
2703 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2704
2705 DECL_ARTIFICIAL (new_parm) = 1;
2706 DECL_ARG_TYPE (new_parm) = ptype;
2707 DECL_CONTEXT (new_parm) = fndecl;
2708 TREE_USED (new_parm) = 1;
2709 DECL_IGNORED_P (new_parm) = 1;
2710 layout_decl (new_parm, 0);
2711
2712 adj->base = parm;
2713 adj->reduction = new_parm;
2714
2715 *link = new_parm;
2716
2717 link = &DECL_CHAIN (new_parm);
2718 }
2719 }
2720
2721 *link = NULL_TREE;
2722
2723 if (care_for_types)
2724 {
2725 new_reversed = nreverse (new_arg_types);
2726 if (last_parm_void)
2727 {
2728 if (new_reversed)
2729 TREE_CHAIN (new_arg_types) = void_list_node;
2730 else
2731 new_reversed = void_list_node;
2732 }
2733 }
2734
2735 /* Copy the original type (via build_distinct_type_copy) to preserve as
2736 much as possible from it (debug info, attribute lists etc.). The
2737 exception is that a METHOD_TYPE must have a THIS argument; when we are
2738 asked to remove it, we need to build a new FUNCTION_TYPE instead. */
2740 if (TREE_CODE (orig_type) != METHOD_TYPE
2741 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
2742 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
2743 {
2744 new_type = build_distinct_type_copy (orig_type);
2745 TYPE_ARG_TYPES (new_type) = new_reversed;
2746 }
2747 else
2748 {
2749 new_type
2750 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2751 new_reversed));
2752 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2753 DECL_VINDEX (fndecl) = NULL_TREE;
2754 }
2755
2756 /* When the signature changes, we need to clear builtin info. */
2757 if (DECL_BUILT_IN (fndecl))
2758 {
2759 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2760 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2761 }
2762
2763 /* This is a new type, not a copy of an old type. Need to reassociate
2764 variants. We can handle everything except the main variant lazily. */
2765 t = TYPE_MAIN_VARIANT (orig_type);
2766 if (orig_type != t)
2767 {
2768 TYPE_MAIN_VARIANT (new_type) = t;
2769 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2770 TYPE_NEXT_VARIANT (t) = new_type;
2771 }
2772 else
2773 {
2774 TYPE_MAIN_VARIANT (new_type) = new_type;
2775 TYPE_NEXT_VARIANT (new_type) = NULL;
2776 }
2777
2778 TREE_TYPE (fndecl) = new_type;
2779 DECL_VIRTUAL_P (fndecl) = 0;
2780 if (otypes)
2781 VEC_free (tree, heap, otypes);
2782 VEC_free (tree, heap, oparms);
2783 }
2784
2785 /* Modify the actual arguments of the call statement STMT as indicated in
2786 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
2787 Otherwise it must contain the corresponding call graph edge. */
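
/* Continuing the hypothetical sketch used for ipa_modify_formal_parameters
   above, and writing gimple-like pseudo code: a call f (&s, 3) would under
   the same adjustments be rewritten to something like

     tmp_1 = MEM[(int *)&s + off];
     f (tmp_1);

   with the MEM_REF built and gimplified by the code below.  */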
2788
2789 void
2790 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2791 ipa_parm_adjustment_vec adjustments)
2792 {
2793 VEC(tree, heap) *vargs;
2794 VEC(tree, gc) **debug_args = NULL;
2795 gimple new_stmt;
2796 gimple_stmt_iterator gsi;
2797 tree callee_decl;
2798 int i, len;
2799
2800 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2801 vargs = VEC_alloc (tree, heap, len);
2802 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2803
2804 gsi = gsi_for_stmt (stmt);
2805 for (i = 0; i < len; i++)
2806 {
2807 struct ipa_parm_adjustment *adj;
2808
2809 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2810
2811 if (adj->copy_param)
2812 {
2813 tree arg = gimple_call_arg (stmt, adj->base_index);
2814
2815 VEC_quick_push (tree, vargs, arg);
2816 }
2817 else if (!adj->remove_param)
2818 {
2819 tree expr, base, off;
2820 location_t loc;
2821
2822 /* We create the new argument out of the value of the old one; we can
2823 do the following kinds of transformations:
2824
2825 - A scalar passed by reference is converted to a scalar passed by
2826 value. (adj->by_ref is false and the type of the original
2827 actual argument is a pointer to a scalar).
2828
2829 - A part of an aggregate is passed instead of the whole aggregate.
2830 The part can be passed either by value or by reference, this is
2831 determined by value of adj->by_ref. Moreover, the code below
2832 handles both situations when the original aggregate is passed by
2833 value (its type is not a pointer) and when it is passed by
2834 reference (it is a pointer to an aggregate).
2835
2836 When the new argument is passed by reference (adj->by_ref is true)
2837 it must be a part of an aggregate and therefore we form it by
2838 simply taking the address of a reference inside the original
2839 aggregate. */
2840
2841 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2842 base = gimple_call_arg (stmt, adj->base_index);
2843 loc = EXPR_LOCATION (base);
2844
2845 if (TREE_CODE (base) != ADDR_EXPR
2846 && POINTER_TYPE_P (TREE_TYPE (base)))
2847 off = build_int_cst (adj->alias_ptr_type,
2848 adj->offset / BITS_PER_UNIT);
2849 else
2850 {
2851 HOST_WIDE_INT base_offset;
2852 tree prev_base;
2853
2854 if (TREE_CODE (base) == ADDR_EXPR)
2855 base = TREE_OPERAND (base, 0);
2856 prev_base = base;
2857 base = get_addr_base_and_unit_offset (base, &base_offset);
2858 /* Aggregate arguments can have non-invariant addresses. */
2859 if (!base)
2860 {
2861 base = build_fold_addr_expr (prev_base);
2862 off = build_int_cst (adj->alias_ptr_type,
2863 adj->offset / BITS_PER_UNIT);
2864 }
2865 else if (TREE_CODE (base) == MEM_REF)
2866 {
2867 off = build_int_cst (adj->alias_ptr_type,
2868 base_offset
2869 + adj->offset / BITS_PER_UNIT);
2870 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2871 off);
2872 base = TREE_OPERAND (base, 0);
2873 }
2874 else
2875 {
2876 off = build_int_cst (adj->alias_ptr_type,
2877 base_offset
2878 + adj->offset / BITS_PER_UNIT);
2879 base = build_fold_addr_expr (base);
2880 }
2881 }
2882
2883 if (!adj->by_ref)
2884 {
2885 tree type = adj->type;
2886 unsigned int align;
2887 unsigned HOST_WIDE_INT misalign;
2888
2889 get_pointer_alignment_1 (base, &align, &misalign);
2890 misalign += (tree_to_double_int (off)
2891 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2892 * BITS_PER_UNIT);
2893 misalign = misalign & (align - 1);
2894 if (misalign != 0)
2895 align = (misalign & -misalign);
2896 if (align < TYPE_ALIGN (type))
2897 type = build_aligned_type (type, align);
2898 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2899 }
2900 else
2901 {
2902 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2903 expr = build_fold_addr_expr (expr);
2904 }
2905
2906 expr = force_gimple_operand_gsi (&gsi, expr,
2907 adj->by_ref
2908 || is_gimple_reg_type (adj->type),
2909 NULL, true, GSI_SAME_STMT);
2910 VEC_quick_push (tree, vargs, expr);
2911 }
2912 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2913 {
2914 unsigned int ix;
2915 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2916 gimple def_temp;
2917
2918 arg = gimple_call_arg (stmt, adj->base_index);
2919 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2920 {
2921 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2922 continue;
2923 arg = fold_convert_loc (gimple_location (stmt),
2924 TREE_TYPE (origin), arg);
2925 }
2926 if (debug_args == NULL)
2927 debug_args = decl_debug_args_insert (callee_decl);
2928 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2929 if (ddecl == origin)
2930 {
2931 ddecl = VEC_index (tree, *debug_args, ix + 1);
2932 break;
2933 }
2934 if (ddecl == NULL)
2935 {
2936 ddecl = make_node (DEBUG_EXPR_DECL);
2937 DECL_ARTIFICIAL (ddecl) = 1;
2938 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2939 DECL_MODE (ddecl) = DECL_MODE (origin);
2940
2941 VEC_safe_push (tree, gc, *debug_args, origin);
2942 VEC_safe_push (tree, gc, *debug_args, ddecl);
2943 }
2944 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2945 stmt);
2946 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2947 }
2948 }
2949
2950 if (dump_file && (dump_flags & TDF_DETAILS))
2951 {
2952 fprintf (dump_file, "replacing stmt:");
2953 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2954 }
2955
2956 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2957 VEC_free (tree, heap, vargs);
2958 if (gimple_call_lhs (stmt))
2959 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2960
2961 gimple_set_block (new_stmt, gimple_block (stmt));
2962 if (gimple_has_location (stmt))
2963 gimple_set_location (new_stmt, gimple_location (stmt));
2964 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2965 gimple_call_copy_flags (new_stmt, stmt);
2966
2967 if (dump_file && (dump_flags & TDF_DETAILS))
2968 {
2969 fprintf (dump_file, "with stmt:");
2970 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2971 fprintf (dump_file, "\n");
2972 }
2973 gsi_replace (&gsi, new_stmt, true);
2974 if (cs)
2975 cgraph_set_call_stmt (cs, new_stmt);
2976 update_ssa (TODO_update_ssa);
2977 free_dominance_info (CDI_DOMINATORS);
2978 }
2979
2980 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2981
2982 static bool
2983 index_in_adjustments_multiple_times_p (int base_index,
2984 ipa_parm_adjustment_vec adjustments)
2985 {
2986 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2987 bool one = false;
2988
2989 for (i = 0; i < len; i++)
2990 {
2991 struct ipa_parm_adjustment *adj;
2992 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2993
2994 if (adj->base_index == base_index)
2995 {
2996 if (one)
2997 return true;
2998 else
2999 one = true;
3000 }
3001 }
3002 return false;
3003 }
3004
3005
3006 /* Return adjustments that should have the same effect on function parameters
3007 and call arguments as if they were first changed according to adjustments in
3008 INNER and then by adjustments in OUTER. */
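
/* A small worked example with made-up indices: if INNER copies formal #0
   and removes formal #1, and OUTER then turns its only remaining
   parameter (index 0 after INNER) into a component at offset 32, the
   combined vector maps base index 0 to offset 32 and re-emits the removal
   of formal #1, which is what the loops below construct.  */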
3009
3010 ipa_parm_adjustment_vec
3011 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3012 ipa_parm_adjustment_vec outer)
3013 {
3014 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
3015 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
3016 int removals = 0;
3017 ipa_parm_adjustment_vec adjustments, tmp;
3018
3019 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
3020 for (i = 0; i < inlen; i++)
3021 {
3022 struct ipa_parm_adjustment *n;
3023 n = &VEC_index (ipa_parm_adjustment_t, inner, i);
3024
3025 if (n->remove_param)
3026 removals++;
3027 else
3028 VEC_quick_push (ipa_parm_adjustment_t, tmp, *n);
3029 }
3030
3031 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
3032 for (i = 0; i < outlen; i++)
3033 {
3034 struct ipa_parm_adjustment r;
3035 struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
3036 outer, i);
3037 struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
3038 out->base_index);
3039
3040 memset (&r, 0, sizeof (r));
3041 gcc_assert (!in->remove_param);
3042 if (out->remove_param)
3043 {
3044 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3045 {
3046 r.remove_param = true;
3047 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3048 }
3049 continue;
3050 }
3051
3052 r.base_index = in->base_index;
3053 r.type = out->type;
3054
3055 /* FIXME: Create nonlocal value too. */
3056
3057 if (in->copy_param && out->copy_param)
3058 r.copy_param = true;
3059 else if (in->copy_param)
3060 r.offset = out->offset;
3061 else if (out->copy_param)
3062 r.offset = in->offset;
3063 else
3064 r.offset = in->offset + out->offset;
3065 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3066 }
3067
3068 for (i = 0; i < inlen; i++)
3069 {
3070 struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
3071 inner, i);
3072
3073 if (n->remove_param)
3074 VEC_quick_push (ipa_parm_adjustment_t, adjustments, *n);
3075 }
3076
3077 VEC_free (ipa_parm_adjustment_t, heap, tmp);
3078 return adjustments;
3079 }
3080
3081 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
3082 way, assuming they are meant to be applied to FNDECL. */
3083
3084 void
3085 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3086 tree fndecl)
3087 {
3088 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3089 bool first = true;
3090 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
3091
3092 fprintf (file, "IPA param adjustments: ");
3093 for (i = 0; i < len; i++)
3094 {
3095 struct ipa_parm_adjustment *adj;
3096 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3097
3098 if (!first)
3099 fprintf (file, " ");
3100 else
3101 first = false;
3102
3103 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3104 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
3105 if (adj->base)
3106 {
3107 fprintf (file, ", base: ");
3108 print_generic_expr (file, adj->base, 0);
3109 }
3110 if (adj->reduction)
3111 {
3112 fprintf (file, ", reduction: ");
3113 print_generic_expr (file, adj->reduction, 0);
3114 }
3115 if (adj->new_ssa_base)
3116 {
3117 fprintf (file, ", new_ssa_base: ");
3118 print_generic_expr (file, adj->new_ssa_base, 0);
3119 }
3120
3121 if (adj->copy_param)
3122 fprintf (file, ", copy_param");
3123 else if (adj->remove_param)
3124 fprintf (file, ", remove_param");
3125 else
3126 fprintf (file, ", offset %li", (long) adj->offset);
3127 if (adj->by_ref)
3128 fprintf (file, ", by_ref");
3129 print_node_brief (file, ", type: ", adj->type, 0);
3130 fprintf (file, "\n");
3131 }
3132 VEC_free (tree, heap, parms);
3133 }
3134
3135 /* Stream out jump function JUMP_FUNC to OB. */
3136
3137 static void
3138 ipa_write_jump_function (struct output_block *ob,
3139 struct ipa_jump_func *jump_func)
3140 {
3141 struct ipa_agg_jf_item *item;
3142 struct bitpack_d bp;
3143 int i, count;
3144
3145 streamer_write_uhwi (ob, jump_func->type);
3146 switch (jump_func->type)
3147 {
3148 case IPA_JF_UNKNOWN:
3149 break;
3150 case IPA_JF_KNOWN_TYPE:
3151 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3152 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3153 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3154 break;
3155 case IPA_JF_CONST:
3156 gcc_assert (
3157 IS_UNKNOWN_LOCATION (EXPR_LOCATION (jump_func->value.constant)));
3158 stream_write_tree (ob, jump_func->value.constant, true);
3159 break;
3160 case IPA_JF_PASS_THROUGH:
3161 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3162 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3163 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3164 bp = bitpack_create (ob->main_stream);
3165 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3166 streamer_write_bitpack (&bp);
3167 break;
3168 case IPA_JF_ANCESTOR:
3169 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3170 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3171 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3172 bp = bitpack_create (ob->main_stream);
3173 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3174 streamer_write_bitpack (&bp);
3175 break;
3176 }
3177
3178 count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
3179 streamer_write_uhwi (ob, count);
3180 if (count)
3181 {
3182 bp = bitpack_create (ob->main_stream);
3183 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3184 streamer_write_bitpack (&bp);
3185 }
3186
3187 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
3188 {
3189 streamer_write_uhwi (ob, item->offset);
3190 stream_write_tree (ob, item->value, true);
3191 }
3192 }
3193
3194 /* Read in jump function JUMP_FUNC from IB. */
3195
3196 static void
3197 ipa_read_jump_function (struct lto_input_block *ib,
3198 struct ipa_jump_func *jump_func,
3199 struct data_in *data_in)
3200 {
3201 struct bitpack_d bp;
3202 int i, count;
3203
3204 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3205 switch (jump_func->type)
3206 {
3207 case IPA_JF_UNKNOWN:
3208 break;
3209 case IPA_JF_KNOWN_TYPE:
3210 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3211 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3212 jump_func->value.known_type.component_type = stream_read_tree (ib,
3213 data_in);
3214 break;
3215 case IPA_JF_CONST:
3216 jump_func->value.constant = stream_read_tree (ib, data_in);
3217 break;
3218 case IPA_JF_PASS_THROUGH:
3219 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3220 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3221 jump_func->value.pass_through.operation
3222 = (enum tree_code) streamer_read_uhwi (ib);
3223 bp = streamer_read_bitpack (ib);
3224 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3225 break;
3226 case IPA_JF_ANCESTOR:
3227 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3228 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3229 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3230 bp = streamer_read_bitpack (ib);
3231 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3232 break;
3233 }
3234
3235 count = streamer_read_uhwi (ib);
3236 jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
3237 if (count)
3238 {
3239 bp = streamer_read_bitpack (ib);
3240 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3241 }
3242 for (i = 0; i < count; i++)
3243 {
3244 struct ipa_agg_jf_item item;
3245 item.offset = streamer_read_uhwi (ib);
3246 item.value = stream_read_tree (ib, data_in);
3247 VEC_quick_push (ipa_agg_jf_item_t, jump_func->agg.items, item);
3248 }
3249 }
3250
3251 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to CS
3252 that are relevant to indirect inlining. */
3253
3254 static void
3255 ipa_write_indirect_edge_info (struct output_block *ob,
3256 struct cgraph_edge *cs)
3257 {
3258 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3259 struct bitpack_d bp;
3260
3261 streamer_write_hwi (ob, ii->param_index);
3262 streamer_write_hwi (ob, ii->offset);
3263 bp = bitpack_create (ob->main_stream);
3264 bp_pack_value (&bp, ii->polymorphic, 1);
3265 bp_pack_value (&bp, ii->agg_contents, 1);
3266 bp_pack_value (&bp, ii->by_ref, 1);
3267 streamer_write_bitpack (&bp);
3268
3269 if (ii->polymorphic)
3270 {
3271 streamer_write_hwi (ob, ii->otr_token);
3272 stream_write_tree (ob, ii->otr_type, true);
3273 }
3274 }
3275
3276 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to CS
3277 that are relevant to indirect inlining. */
3278
3279 static void
3280 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3281 struct data_in *data_in ATTRIBUTE_UNUSED,
3282 struct cgraph_edge *cs)
3283 {
3284 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3285 struct bitpack_d bp;
3286
3287 ii->param_index = (int) streamer_read_hwi (ib);
3288 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3289 bp = streamer_read_bitpack (ib);
3290 ii->polymorphic = bp_unpack_value (&bp, 1);
3291 ii->agg_contents = bp_unpack_value (&bp, 1);
3292 ii->by_ref = bp_unpack_value (&bp, 1);
3293 if (ii->polymorphic)
3294 {
3295 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3296 ii->otr_type = stream_read_tree (ib, data_in);
3297 }
3298 }
3299
3300 /* Stream out NODE info to OB. */
3301
3302 static void
3303 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3304 {
3305 int node_ref;
3306 lto_symtab_encoder_t encoder;
3307 struct ipa_node_params *info = IPA_NODE_REF (node);
3308 int j;
3309 struct cgraph_edge *e;
3310 struct bitpack_d bp;
3311
3312 encoder = ob->decl_state->symtab_node_encoder;
3313 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3314 streamer_write_uhwi (ob, node_ref);
3315
3316 bp = bitpack_create (ob->main_stream);
3317 gcc_assert (info->uses_analysis_done
3318 || ipa_get_param_count (info) == 0);
3319 gcc_assert (!info->node_enqueued);
3320 gcc_assert (!info->ipcp_orig_node);
3321 for (j = 0; j < ipa_get_param_count (info); j++)
3322 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3323 streamer_write_bitpack (&bp);
3324 for (e = node->callees; e; e = e->next_callee)
3325 {
3326 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3327
3328 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3329 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3330 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3331 }
3332 for (e = node->indirect_calls; e; e = e->next_callee)
3333 {
3334 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3335
3336 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3337 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3338 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3339 ipa_write_indirect_edge_info (ob, e);
3340 }
3341 }
3342
3343 /* Stream in NODE info from IB. */
3344
3345 static void
3346 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3347 struct data_in *data_in)
3348 {
3349 struct ipa_node_params *info = IPA_NODE_REF (node);
3350 int k;
3351 struct cgraph_edge *e;
3352 struct bitpack_d bp;
3353
3354 ipa_initialize_node_params (node);
3355
3356 bp = streamer_read_bitpack (ib);
3357 if (ipa_get_param_count (info) != 0)
3358 info->uses_analysis_done = true;
3359 info->node_enqueued = false;
3360 for (k = 0; k < ipa_get_param_count (info); k++)
3361 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3362 for (e = node->callees; e; e = e->next_callee)
3363 {
3364 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3365 int count = streamer_read_uhwi (ib);
3366
3367 if (!count)
3368 continue;
3369 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
3370
3371 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3372 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3373 }
3374 for (e = node->indirect_calls; e; e = e->next_callee)
3375 {
3376 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3377 int count = streamer_read_uhwi (ib);
3378
3379 if (count)
3380 {
3381 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
3382 count);
3383 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3384 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3385 data_in);
3386 }
3387 ipa_read_indirect_edge_info (ib, data_in, e);
3388 }
3389 }
3390
3391 /* Write jump functions of all functions in the current LTO partition. */
3392
3393 void
3394 ipa_prop_write_jump_functions (void)
3395 {
3396 struct cgraph_node *node;
3397 struct output_block *ob;
3398 unsigned int count = 0;
3399 lto_symtab_encoder_iterator lsei;
3400 lto_symtab_encoder_t encoder;
3401
3403 if (!ipa_node_params_vector)
3404 return;
3405
3406 ob = create_output_block (LTO_section_jump_functions);
3407 encoder = ob->decl_state->symtab_node_encoder;
3408 ob->cgraph_node = NULL;
3409 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3410 lsei_next_function_in_partition (&lsei))
3411 {
3412 node = lsei_cgraph_node (lsei);
3413 if (cgraph_function_with_gimple_body_p (node)
3414 && IPA_NODE_REF (node) != NULL)
3415 count++;
3416 }
3417
3418 streamer_write_uhwi (ob, count);
3419
3420 /* Process all of the functions. */
3421 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3422 lsei_next_function_in_partition (&lsei))
3423 {
3424 node = lsei_cgraph_node (lsei);
3425 if (cgraph_function_with_gimple_body_p (node)
3426 && IPA_NODE_REF (node) != NULL)
3427 ipa_write_node_info (ob, node);
3428 }
3429 streamer_write_char_stream (ob->main_stream, 0);
3430 produce_asm (ob, NULL);
3431 destroy_output_block (ob);
3432 }
3433
3434 /* Read a section with contents DATA of length LEN from file FILE_DATA. */
3435
3436 static void
3437 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3438 size_t len)
3439 {
3440 const struct lto_function_header *header =
3441 (const struct lto_function_header *) data;
3442 const int cfg_offset = sizeof (struct lto_function_header);
3443 const int main_offset = cfg_offset + header->cfg_size;
3444 const int string_offset = main_offset + header->main_size;
3445 struct data_in *data_in;
3446 struct lto_input_block ib_main;
3447 unsigned int i;
3448 unsigned int count;
3449
3450 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3451 header->main_size);
3452
3453 data_in =
3454 lto_data_in_create (file_data, (const char *) data + string_offset,
3455 header->string_size, NULL);
3456 count = streamer_read_uhwi (&ib_main);
3457
3458 for (i = 0; i < count; i++)
3459 {
3460 unsigned int index;
3461 struct cgraph_node *node;
3462 lto_symtab_encoder_t encoder;
3463
3464 index = streamer_read_uhwi (&ib_main);
3465 encoder = file_data->symtab_node_encoder;
3466 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3467 gcc_assert (node->analyzed);
3468 ipa_read_node_info (&ib_main, node, data_in);
3469 }
3470 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3471 len);
3472 lto_data_in_delete (data_in);
3473 }
3474
3475 /* Read ipcp jump functions. */
3476
3477 void
3478 ipa_prop_read_jump_functions (void)
3479 {
3480 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3481 struct lto_file_decl_data *file_data;
3482 unsigned int j = 0;
3483
3484 ipa_check_create_node_params ();
3485 ipa_check_create_edge_args ();
3486 ipa_register_cgraph_hooks ();
3487
3488 while ((file_data = file_data_vec[j++]))
3489 {
3490 size_t len;
3491 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3492
3493 if (data)
3494 ipa_prop_read_section (file_data, data, len);
3495 }
3496 }
3497
3498 /* After merging units, we can get a mismatch in argument counts.
3499 Also, decl merging might have rendered parameter lists obsolete, so
3500 (re-)initialize parameter information for all analyzed functions. */
3501
3502 void
3503 ipa_update_after_lto_read (void)
3504 {
3505 struct cgraph_node *node;
3506
3507 ipa_check_create_node_params ();
3508 ipa_check_create_edge_args ();
3509
3510 FOR_EACH_DEFINED_FUNCTION (node)
3511 if (node->analyzed)
3512 ipa_initialize_node_params (node);
3513 }