/* Interprocedural analyses.
   Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "gimple.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"


/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the edge argument infos are actually stored.  */
VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  int i, count;

  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    if (ipa_get_param (info, i) == ptree)
      return i;

  return -1;
}

/* Populate the param_decl field in parameter descriptors of INFO that
   corresponds to NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          struct ipa_node_params *info)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      VEC_index (ipa_param_descriptor_t,
                 info->descriptors, param_num).decl = parm;
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors)
    {
      int param_count;

      param_count = count_formal_params (node->symbol.decl);
      if (param_count)
        {
          VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
                                 info->descriptors, param_count);
          ipa_populate_param_decls (node, info);
        }
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   tree_code_name[(int)
                                  jump_func->value.pass_through.operation]);
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
                            j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_low_cst (TYPE_SIZE (item->value), 1));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}
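
/* For illustration (an assumed example, derived from the fprintf calls above
   rather than copied from a real dump): an aggregate-preserving simple
   pass-through of parameter 0 would be printed as

     param 0: PASS THROUGH: 0, op nop_expr, agg_preserved  */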


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  int i;

  fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (cgraph_node_name (node)), node->uid,
               xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      if (cs->call_stmt)
        {
          fprintf (f, " indirect callsite %d for stmt ", i);
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, " indirect callsite %d :\n", i);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant = constant;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = operand;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
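
/* For illustration (assumed examples, mirroring the ones given later in this
   file): a simple pass-through describes passing a formal parameter
   unchanged, as in "bar (a);" where a is a parameter of the caller; an
   arithmetic pass-through describes "bar (a + 4);"; and an ancestor jump
   function describes passing the address of a sub-object at a known offset
   within a parameter, as in "A::bar (&this_1(D)->D.1748);".  */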

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look the following way:

   1) The very first thing they do is call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can
   try to derive the new type.  That is enough and we can stop, we will never
   see the calls into constructors of sub-objects in this code.  Therefore we
   can safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to
             find if there is a field corresponding to the offset and if so,
             proceed almost as if it was a component ref.  */
        }
    }
  return true;
}
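
/* For illustration (an assumed example, not taken from the original sources):
   a vtable pointer store that must not be filtered out by the function above
   looks in GIMPLE roughly like

     this_1(D)->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment whose LHS is a COMPONENT_REF of a FIELD_DECL that has
   DECL_VIRTUAL_P set.  */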

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}


/* Like detect_type_change but with extra argument COMP_TYPE which will become
   the component type part of the new JFUNC if dynamic type change is detected
   and the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
                      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
   looking for assignments to its virtual table pointer.  If it has, return
   true and fill in the jump function JFUNC with relevant type information or
   set it to unknown.  ARG is the object itself (not a pointer to it, unless
   dereferenced).  BASE is the base of the memory access as returned by
   get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  comp_type = TREE_TYPE (TREE_TYPE (arg));
  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked by setting
   the boolean variable pointed to by DATA to true.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration
   that is known not to have been modified, return the index of the parameter
   in ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_node_params *info,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index (info, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO,
   if non-NULL, is a pointer to a structure containing temporary information
   about the parameter.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct ipa_node_params *info,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index (info, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index (info, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (info, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      return true;
    }
  return false;
}

/* Just like the previous function, only without the param_analysis_info
   pointer, for use by callers outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info, NULL, stmt, op, index_p, offset_p,
                                   by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        bar (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

      B::foo() (struct B * const this)
      {
        struct A * D.1845;

        D.1845_2 = &this_1(D)->D.1748;
        A::bar (D.1845_2);

   INFO is the structure describing individual formal parameters and is used
   throughout the various stages of IPA optimizations.  PARMS_AINFO contains
   the information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt)
               && !detect_type_change_ssa (tc_ssa, call, jfunc))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether it is in fact a multiple-inheritance
   typecast from a descendant into an ancestor of a formal parameter and thus
   can be described by an ancestor jump function and if so, write the
   appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm));
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}
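
/* For illustration (an assumed example, not from the original sources):
   compute_known_type_jump_func typically succeeds for an argument such as

     struct B b;
     init (&b);

   where b is a local variable: &b is an ADDR_EXPR of an automatic DECL with
   a RECORD_TYPE, so the dynamic type of the pointed-to object is known at
   the call unless detect_type_change reports a possible change.  */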

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
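
/* For reference (an illustrative sketch, not quoted from any ABI document):
   the record matched above corresponds to the common representation of C++
   pointers to member functions, roughly (using pseudo-types)

     struct
     {
       fn_ptr_or_vtable_offset __pfn;
       ptrdiff_t __delta;
     };

   with __pfn holding the target function, or a vtable offset whose least
   significant bit is set, and __delta the adjustment to apply to the this
   pointer; the pattern matching in ipa_analyze_indirect_call_uses below
   relies on this layout.  */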

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* TODO: Turn this into a PARAM.  */
#define IPA_MAX_AFF_JF_ITEMS 16

/* Simple linked list, describing known contents of an aggregate before
   a call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL_TREE if the contents of this part are
     unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_low_cst (type_size, 1);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        break;

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == IPA_MAX_AFF_JF_ITEMS
          || item_count == 2 * IPA_MAX_AFF_JF_ITEMS)
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item *item;
              item = VEC_quick_push (ipa_agg_jf_item_t,
                                     jfunc->agg.items, NULL);
              item->offset = list->offset - arg_offset;
              item->value = list->constant;
            }
          list = list->next;
        }
    }
}
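
/* For illustration (an assumed example, not from the original sources):
   given a caller fragment such as

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward scan above records the two constant stores, and the jump
   function of the first argument of foo receives two items holding the
   offsets of a and b and the constants 1 and 2, with by_ref set.  */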

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             loop.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0
                  && !detect_type_change_ssa (arg, call, jfunc))
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (TREE_TYPE (arg))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->analyzed && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
     f$__delta_5 = MEM[(struct *)&f];
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   and a few lines below:

     <bb 5>
     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;
     if (D.2497_4 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 6>:
     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
                                struct ipa_node_params *info,
                                struct param_analysis_info *parms_ainfo,
                                gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (node, index, call);
      return;
    }

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info, parms_ainfo, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
    }

  return;
}

/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
                               struct ipa_node_params *info, gimple call,
                               tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
        return;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
        return;
    }

  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
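
/* For illustration (an assumed example with made-up SSA names, not from the
   original sources): a virtual call handled above appears in GIMPLE roughly
   as

     OBJ_TYPE_REF(iftmp.0_3;this_2(D)->0) (this_2(D));

   where the OBJ_TYPE_REF object is the SSA name this_2(D) and the token 0
   selects the vtable slot; the call is recorded as an indirect call on the
   corresponding parameter with ii->polymorphic set.  */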

/* Analyze call statement CALL to determine whether and how it utilizes formal
   parameters of the caller (described by INFO).  PARMS_AINFO is a pointer to a
   vector containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct cgraph_node *node,
                       struct ipa_node_params *info,
                       struct param_analysis_info *parms_ainfo, gimple call)
{
  tree target = gimple_call_fn (call);

  if (!target)
    return;
  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
  else if (TREE_CODE (target) == OBJ_TYPE_REF)
    ipa_analyze_virtual_call_uses (node, info, call, target);
}


/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by NODE.  Currently it only checks whether formal
   parameters are called.  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  */

static void
ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
                       struct param_analysis_info *parms_ainfo, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
}
1823
1824 /* Callback of walk_stmt_load_store_addr_ops, used below for visit_load,
1825 visit_store and visit_addr alike. If OP is a parameter declaration, mark it
1826 as used in the info structure passed in DATA. */
1827
1828 static bool
1829 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1830 tree op, void *data)
1831 {
1832 struct ipa_node_params *info = (struct ipa_node_params *) data;
1833
1834 op = get_base_address (op);
1835 if (op
1836 && TREE_CODE (op) == PARM_DECL)
1837 {
1838 int index = ipa_get_param_decl_index (info, op);
1839 gcc_assert (index >= 0);
1840 ipa_set_param_used (info, index, true);
1841 }
1842
1843 return false;
1844 }
1845
1846 /* Scan the function body of NODE and inspect the uses of formal parameters.
1847 Store the findings in various structures of the associated ipa_node_params
1848 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1849 vector containing intermediate information about each formal parameter. */
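
/* A hypothetical example of what the loop below computes: in

     int
     foo (int used, int unused)
     {
       return used + 1;
     }

   the default SSA definition of USED has uses, so its descriptor is marked
   as used immediately; UNUSED is only marked if the statement walk further
   below finds a load, store or address operation mentioning it.  */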
1850
1851 static void
1852 ipa_analyze_params_uses (struct cgraph_node *node,
1853 struct param_analysis_info *parms_ainfo)
1854 {
1855 tree decl = node->symbol.decl;
1856 basic_block bb;
1857 struct function *func;
1858 gimple_stmt_iterator gsi;
1859 struct ipa_node_params *info = IPA_NODE_REF (node);
1860 int i;
1861
1862 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1863 return;
1864
1865 for (i = 0; i < ipa_get_param_count (info); i++)
1866 {
1867 tree parm = ipa_get_param (info, i);
1868 tree ddef;
1869 /* For SSA regs see if the parameter is used. For non-SSA parameters
1870 we compute the flag during modification analysis. */
1871 if (is_gimple_reg (parm)
1872 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1873 parm)) != NULL_TREE
1874 && !has_zero_uses (ddef))
1875 ipa_set_param_used (info, i, true);
1876 }
1877
1878 func = DECL_STRUCT_FUNCTION (decl);
1879 FOR_EACH_BB_FN (bb, func)
1880 {
1881 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1882 {
1883 gimple stmt = gsi_stmt (gsi);
1884
1885 if (is_gimple_debug (stmt))
1886 continue;
1887
1888 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1889 walk_stmt_load_store_addr_ops (stmt, info,
1890 visit_ref_for_mod_analysis,
1891 visit_ref_for_mod_analysis,
1892 visit_ref_for_mod_analysis);
1893 }
1894 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1895 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1896 visit_ref_for_mod_analysis,
1897 visit_ref_for_mod_analysis,
1898 visit_ref_for_mod_analysis);
1899 }
1900
1901 info->uses_analysis_done = 1;
1902 }
1903
1904 /* Initialize the array describing properties of formal parameters
1905 of NODE, analyze their uses and compute jump functions associated
1906 with actual arguments of calls from within NODE. */
1907
1908 void
1909 ipa_analyze_node (struct cgraph_node *node)
1910 {
1911 struct ipa_node_params *info;
1912 struct param_analysis_info *parms_ainfo;
1913 int i, param_count;
1914
1915 ipa_check_create_node_params ();
1916 ipa_check_create_edge_args ();
1917 info = IPA_NODE_REF (node);
1918 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1919 current_function_decl = node->symbol.decl;
1920 ipa_initialize_node_params (node);
1921
1922 param_count = ipa_get_param_count (info);
1923 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1924 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1925
1926 ipa_analyze_params_uses (node, parms_ainfo);
1927 ipa_compute_jump_functions (node, parms_ainfo);
1928
1929 for (i = 0; i < param_count; i++)
1930 {
1931 if (parms_ainfo[i].parm_visited_statements)
1932 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1933 if (parms_ainfo[i].pt_visited_statements)
1934 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1935 }
1936
1937 current_function_decl = NULL;
1938 pop_cfun ();
1939 }
1940
1941
1942 /* Update the jump function DST when the call graph edge corresponding to SRC
1943 is being inlined, knowing that DST is of type ancestor and SRC of known
1944 type. */
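
/* A small worked example with hypothetical offsets: if SRC records "known
   type at offset 32" and DST is an ancestor jump function that adds offset
   64 to reach its type, the code below produces a known type jump function
   with offset 32 + 64 = 96, the base type taken from SRC and the component
   type taken from the ancestor type of DST.  */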
1945
1946 static void
1947 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1948 struct ipa_jump_func *dst)
1949 {
1950 HOST_WIDE_INT combined_offset;
1951 tree combined_type;
1952
1953 combined_offset = ipa_get_jf_known_type_offset (src)
1954 + ipa_get_jf_ancestor_offset (dst);
1955 combined_type = ipa_get_jf_ancestor_type (dst);
1956
1957 ipa_set_jf_known_type (dst, combined_offset,
1958 ipa_get_jf_known_type_base_type (src),
1959 combined_type);
1960 }
1961
1962 /* Update the jump functions associated with call graph edge E when the call
1963 graph edge CS is being inlined, assuming that E->caller is already (possibly
1964 indirectly) inlined into CS->callee and that E has not been inlined. */
1965
1966 static void
1967 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1968 struct cgraph_edge *e)
1969 {
1970 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1971 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1972 int count = ipa_get_cs_argument_count (args);
1973 int i;
1974
1975 for (i = 0; i < count; i++)
1976 {
1977 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1978
1979 if (dst->type == IPA_JF_ANCESTOR)
1980 {
1981 struct ipa_jump_func *src;
1982 int dst_fid = dst->value.ancestor.formal_id;
1983
1984 /* A variable number of arguments can cause havoc if we try to access
1985 an argument that does not exist on the inlined edge. So make sure we
1986 don't. */
1987 if (dst_fid >= ipa_get_cs_argument_count (top))
1988 {
1989 dst->type = IPA_JF_UNKNOWN;
1990 continue;
1991 }
1992
1993 src = ipa_get_ith_jump_func (top, dst_fid);
1994
1995 if (src->agg.items
1996 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
1997 {
1998 struct ipa_agg_jf_item *item;
1999 int j;
2000
2001 /* Currently we do not produce clobber aggregate jump functions;
2002 replace this assertion with merging when we do. */
2003 gcc_assert (!dst->agg.items);
2004
2005 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
2006 dst->agg.by_ref = src->agg.by_ref;
2007 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
2008 item->offset -= dst->value.ancestor.offset;
2009 }
2010
2011 if (src->type == IPA_JF_KNOWN_TYPE)
2012 combine_known_type_and_ancestor_jfs (src, dst);
2013 else if (src->type == IPA_JF_PASS_THROUGH
2014 && src->value.pass_through.operation == NOP_EXPR)
2015 {
2016 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2017 dst->value.ancestor.agg_preserved &=
2018 src->value.pass_through.agg_preserved;
2019 }
2020 else if (src->type == IPA_JF_ANCESTOR)
2021 {
2022 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2023 dst->value.ancestor.offset += src->value.ancestor.offset;
2024 dst->value.ancestor.agg_preserved &=
2025 src->value.ancestor.agg_preserved;
2026 }
2027 else
2028 dst->type = IPA_JF_UNKNOWN;
2029 }
2030 else if (dst->type == IPA_JF_PASS_THROUGH)
2031 {
2032 struct ipa_jump_func *src;
2033 /* We must check the range due to calls with a variable number of arguments,
2034 and we cannot combine jump functions with operations. */
2035 if (dst->value.pass_through.operation == NOP_EXPR
2036 && (dst->value.pass_through.formal_id
2037 < ipa_get_cs_argument_count (top)))
2038 {
2039 bool agg_p;
2040 int dst_fid = dst->value.pass_through.formal_id;
2041 src = ipa_get_ith_jump_func (top, dst_fid);
2042 agg_p = dst->value.pass_through.agg_preserved;
2043
2044 dst->type = src->type;
2045 dst->value = src->value;
2046
2047 if (src->agg.items
2048 && (agg_p || !src->agg.by_ref))
2049 {
2050 /* Currently we do not produce clobber aggregate jump
2051 functions; replace this assertion with merging when we do. */
2052 gcc_assert (!dst->agg.items);
2053
2054 dst->agg.by_ref = src->agg.by_ref;
2055 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
2056 src->agg.items);
2057 }
2058
2059 if (!agg_p)
2060 {
2061 if (dst->type == IPA_JF_PASS_THROUGH)
2062 dst->value.pass_through.agg_preserved = false;
2063 else if (dst->type == IPA_JF_ANCESTOR)
2064 dst->value.ancestor.agg_preserved = false;
2065 }
2066 }
2067 else
2068 dst->type = IPA_JF_UNKNOWN;
2069 }
2070 }
2071 }
2072
2073 /* If TARGET is an ADDR_EXPR of a function declaration, make it the destination
2074 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2075
2076 struct cgraph_edge *
2077 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2078 {
2079 struct cgraph_node *callee;
2080
2081 if (TREE_CODE (target) == ADDR_EXPR)
2082 target = TREE_OPERAND (target, 0);
2083 if (TREE_CODE (target) != FUNCTION_DECL)
2084 return NULL;
2085 callee = cgraph_get_node (target);
2086 if (!callee)
2087 return NULL;
2088 ipa_check_create_node_params ();
2089
2090 /* We cannot make edges to inline clones. It is a bug if someone removed
2091 the cgraph node too early. */
2092 gcc_assert (!callee->global.inlined_to);
2093
2094 cgraph_make_edge_direct (ie, callee);
2095 if (dump_file)
2096 {
2097 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2098 "(%s/%i -> %s/%i), for stmt ",
2099 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2100 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2101 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2102 if (ie->call_stmt)
2103 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2104 else
2105 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2106 }
2107 callee = cgraph_function_or_thunk_node (callee, NULL);
2108
2109 return ie;
2110 }
2111
2112 /* Retrieve a value from the aggregate jump function AGG for the given OFFSET,
2113 or return NULL if there is none. BY_REF specifies whether the value has to
2114 be passed by reference or by value. */
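
/* For example, with hypothetical contents, an AGG holding the items
   {offset 0 -> 1, offset 32 -> &f} answers a query for offset 32 with &f
   and a query for offset 16 with NULL.  The loop below relies on the items
   being sorted by increasing offset, which allows it to give up as soon as
   item->offset exceeds the requested OFFSET.  */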
2115
2116 tree
2117 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2118 HOST_WIDE_INT offset, bool by_ref)
2119 {
2120 struct ipa_agg_jf_item *item;
2121 int i;
2122
2123 if (by_ref != agg->by_ref)
2124 return NULL;
2125
2126 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2127 {
2128 if (item->offset == offset)
2129 {
2130 /* Currently we do not have clobber values; return NULL for them once
2131 we do. */
2132 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2133 return item->value;
2134 }
2135 else if (item->offset > offset)
2136 return NULL;
2137 }
2138 return NULL;
2139 }
2140
2141 /* Try to find a destination for indirect edge IE that corresponds to a simple
2142 call or a call of a member function pointer and where the destination is a
2143 pointer formal parameter described by jump function JFUNC. If it can be
2144 determined, return the newly direct edge, otherwise return NULL. */
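
/* A hedged sketch of the two cases handled below: for an indirect call
   through a plain function pointer, such as the hypothetical

     void dispatch (void (*fn) (void)) { fn (); }

   a constant jump function directly supplies the target.  For a call
   through a member function pointer, the target is instead looked up among
   the aggregate parts of the argument, at the offset recorded in the
   indirect call info.  */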
2145
2146 static struct cgraph_edge *
2147 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2148 struct ipa_jump_func *jfunc)
2149 {
2150 tree target;
2151
2152 if (ie->indirect_info->agg_contents)
2153 {
2154 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2155 ie->indirect_info->offset,
2156 ie->indirect_info->by_ref);
2157 if (!target)
2158 return NULL;
2159 }
2160 else
2161 {
2162 if (jfunc->type != IPA_JF_CONST)
2163 return NULL;
2164 target = ipa_get_jf_constant (jfunc);
2165 }
2166 return ipa_make_edge_direct_to_target (ie, target);
2167 }
2168
2169 /* Try to find a destination for indirect edge IE that corresponds to a
2170 virtual call based on a formal parameter which is described by jump
2171 function JFUNC and if it can be determined, make it direct and return the
2172 direct edge. Otherwise, return NULL. */
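
/* In outline, the lookup below proceeds as follows: the known type jump
   function supplies the dynamic type of the object, TYPE_BINFO gives its
   binfo, get_binfo_at_offset descends to the base at the combined offset,
   and gimple_get_virt_method_for_binfo finally selects the method with the
   recorded OBJ_TYPE_REF token from that base's virtual table.  */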
2173
2174 static struct cgraph_edge *
2175 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2176 struct ipa_jump_func *jfunc)
2177 {
2178 tree binfo, target;
2179
2180 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2181 return NULL;
2182
2183 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2184 gcc_checking_assert (binfo);
2185 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2186 + ie->indirect_info->offset,
2187 ie->indirect_info->otr_type);
2188 if (binfo)
2189 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2190 binfo);
2191 else
2192 return NULL;
2193
2194 if (target)
2195 return ipa_make_edge_direct_to_target (ie, target);
2196 else
2197 return NULL;
2198 }
2199
2200 /* Update the param called notes associated with NODE when CS is being inlined,
2201 assuming NODE is (potentially indirectly) inlined into CS->callee.
2202 Moreover, if the callee is discovered to be constant, create a new cgraph
2203 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2204 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
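
/* A hypothetical illustration of the index remapping performed below: if an
   indirect call in NODE dispatches through its second formal parameter (so
   ici->param_index is 1) and the inlined edge CS passes the caller's fifth
   argument there unchanged (a NOP_EXPR pass-through jump function with
   formal_id 4), param_index is rewritten to 4 so that later propagation
   keeps tracking the same value.  */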
2205
2206 static bool
2207 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2208 struct cgraph_node *node,
2209 VEC (cgraph_edge_p, heap) **new_edges)
2210 {
2211 struct ipa_edge_args *top;
2212 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2213 bool res = false;
2214
2215 ipa_check_create_edge_args ();
2216 top = IPA_EDGE_REF (cs);
2217
2218 for (ie = node->indirect_calls; ie; ie = next_ie)
2219 {
2220 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2221 struct ipa_jump_func *jfunc;
2222 int param_index;
2223
2224 next_ie = ie->next_callee;
2225
2226 if (ici->param_index == -1)
2227 continue;
2228
2229 /* We must check the range due to calls with a variable number of arguments. */
2230 if (ici->param_index >= ipa_get_cs_argument_count (top))
2231 {
2232 ici->param_index = -1;
2233 continue;
2234 }
2235
2236 param_index = ici->param_index;
2237 jfunc = ipa_get_ith_jump_func (top, param_index);
2238 if (jfunc->type == IPA_JF_PASS_THROUGH
2239 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2240 {
2241 if (ici->agg_contents
2242 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2243 ici->param_index = -1;
2244 else
2245 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2246 }
2247 else if (jfunc->type == IPA_JF_ANCESTOR)
2248 {
2249 if (ici->agg_contents
2250 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2251 ici->param_index = -1;
2252 else
2253 {
2254 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2255 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2256 }
2257 }
2258 else
2259 /* Either we can find a destination for this edge now or never. */
2260 ici->param_index = -1;
2261
2262 if (!flag_indirect_inlining)
2263 continue;
2264
2265 if (ici->polymorphic)
2266 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2267 else
2268 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2269
2270 if (new_direct_edge)
2271 {
2272 new_direct_edge->indirect_inlining_edge = 1;
2273 if (new_direct_edge->call_stmt)
2274 new_direct_edge->call_stmt_cannot_inline_p
2275 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2276 new_direct_edge->callee->symbol.decl);
2277 if (new_edges)
2278 {
2279 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
2280 new_direct_edge);
2281 top = IPA_EDGE_REF (cs);
2282 res = true;
2283 }
2284 }
2285 }
2286
2287 return res;
2288 }
2289
2290 /* Recursively traverse the subtree of NODE (including NODE itself) made of
2291 inlined cgraph_edges when CS has been inlined and invoke
2292 update_indirect_edges_after_inlining on all nodes and
2293 update_jump_functions_after_inlining on all non-inlined edges that lead out
2294 of this subtree. Newly discovered indirect edges will be added to
2295 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2296 created. */
2297
2298 static bool
2299 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2300 struct cgraph_node *node,
2301 VEC (cgraph_edge_p, heap) **new_edges)
2302 {
2303 struct cgraph_edge *e;
2304 bool res;
2305
2306 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2307
2308 for (e = node->callees; e; e = e->next_callee)
2309 if (!e->inline_failed)
2310 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2311 else
2312 update_jump_functions_after_inlining (cs, e);
2313 for (e = node->indirect_calls; e; e = e->next_callee)
2314 update_jump_functions_after_inlining (cs, e);
2315
2316 return res;
2317 }
2318
2319 /* Update jump functions and call note functions on inlining the call site CS.
2320 CS is expected to lead to a node already cloned by
2321 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2322 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2323 created. */
2324
2325 bool
2326 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2327 VEC (cgraph_edge_p, heap) **new_edges)
2328 {
2329 bool changed;
2330 /* Do nothing if the preparation phase has not been carried out yet
2331 (i.e. during early inlining). */
2332 if (!ipa_node_params_vector)
2333 return false;
2334 gcc_assert (ipa_edge_args_vector);
2335
2336 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2337
2338 /* We do not keep jump functions of inlined edges up to date. Better to free
2339 them so we do not access them accidentally. */
2340 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2341 return changed;
2342 }
2343
2344 /* Frees all dynamically allocated structures that the argument info points
2345 to. */
2346
2347 void
2348 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2349 {
2350 if (args->jump_functions)
2351 ggc_free (args->jump_functions);
2352
2353 memset (args, 0, sizeof (*args));
2354 }
2355
2356 /* Free all ipa_edge_args structures. */
2357
2358 void
2359 ipa_free_all_edge_args (void)
2360 {
2361 int i;
2362 struct ipa_edge_args *args;
2363
2364 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2365 ipa_free_edge_args_substructures (args);
2366
2367 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2368 ipa_edge_args_vector = NULL;
2369 }
2370
2371 /* Frees all dynamically allocated structures that the param info points
2372 to. */
2373
2374 void
2375 ipa_free_node_params_substructures (struct ipa_node_params *info)
2376 {
2377 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2378 free (info->lattices);
2379 /* Lattice values and their sources are deallocated with their allocation
2380 pool. */
2381 VEC_free (tree, heap, info->known_vals);
2382 memset (info, 0, sizeof (*info));
2383 }
2384
2385 /* Free all ipa_node_params structures. */
2386
2387 void
2388 ipa_free_all_node_params (void)
2389 {
2390 int i;
2391 struct ipa_node_params *info;
2392
2393 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2394 ipa_free_node_params_substructures (info);
2395
2396 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2397 ipa_node_params_vector = NULL;
2398 }
2399
2400 /* Hook that is called by cgraph.c when an edge is removed. */
2401
2402 static void
2403 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2404 {
2405 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2406 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2407 <= (unsigned)cs->uid)
2408 return;
2409 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2410 }
2411
2412 /* Hook that is called by cgraph.c when a node is removed. */
2413
2414 static void
2415 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2416 {
2417 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2418 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2419 <= (unsigned)node->uid)
2420 return;
2421 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2422 }
2423
2424 /* Hook that is called by cgraph.c when an edge is duplicated. */
2425
2426 static void
2427 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2428 void *data ATTRIBUTE_UNUSED)
2429 {
2430 struct ipa_edge_args *old_args, *new_args;
2431 unsigned int i;
2432
2433 ipa_check_create_edge_args ();
2434
2435 old_args = IPA_EDGE_REF (src);
2436 new_args = IPA_EDGE_REF (dst);
2437
2438 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2439 old_args->jump_functions);
2440
2441 for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
2442 VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
2443 = VEC_copy (ipa_agg_jf_item_t, gc,
2444 VEC_index (ipa_jump_func_t,
2445 old_args->jump_functions, i).agg.items);
2446 }
2447
2448 /* Hook that is called by cgraph.c when a node is duplicated. */
2449
2450 static void
2451 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2452 void *data ATTRIBUTE_UNUSED)
2453 {
2454 struct ipa_node_params *old_info, *new_info;
2455
2456 ipa_check_create_node_params ();
2457 old_info = IPA_NODE_REF (src);
2458 new_info = IPA_NODE_REF (dst);
2459
2460 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2461 old_info->descriptors);
2462 new_info->lattices = NULL;
2463 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2464
2465 new_info->uses_analysis_done = old_info->uses_analysis_done;
2466 new_info->node_enqueued = old_info->node_enqueued;
2467 }
2468
2469
2470 /* Analyze a function newly added into the callgraph. */
2471
2472 static void
2473 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2474 {
2475 ipa_analyze_node (node);
2476 }
2477
2478 /* Register our cgraph hooks if they are not already there. */
2479
2480 void
2481 ipa_register_cgraph_hooks (void)
2482 {
2483 if (!edge_removal_hook_holder)
2484 edge_removal_hook_holder =
2485 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2486 if (!node_removal_hook_holder)
2487 node_removal_hook_holder =
2488 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2489 if (!edge_duplication_hook_holder)
2490 edge_duplication_hook_holder =
2491 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2492 if (!node_duplication_hook_holder)
2493 node_duplication_hook_holder =
2494 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2495 function_insertion_hook_holder =
2496 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2497 }
2498
2499 /* Unregister our cgraph hooks. */
2500
2501 static void
2502 ipa_unregister_cgraph_hooks (void)
2503 {
2504 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2505 edge_removal_hook_holder = NULL;
2506 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2507 node_removal_hook_holder = NULL;
2508 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2509 edge_duplication_hook_holder = NULL;
2510 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2511 node_duplication_hook_holder = NULL;
2512 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2513 function_insertion_hook_holder = NULL;
2514 }
2515
2516 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2517 longer needed after ipa-cp. */
2518
2519 void
2520 ipa_free_all_structures_after_ipa_cp (void)
2521 {
2522 if (!optimize)
2523 {
2524 ipa_free_all_edge_args ();
2525 ipa_free_all_node_params ();
2526 free_alloc_pool (ipcp_sources_pool);
2527 free_alloc_pool (ipcp_values_pool);
2528 ipa_unregister_cgraph_hooks ();
2529 }
2530 }
2531
2532 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2533 longer needed after indirect inlining. */
2534
2535 void
2536 ipa_free_all_structures_after_iinln (void)
2537 {
2538 ipa_free_all_edge_args ();
2539 ipa_free_all_node_params ();
2540 ipa_unregister_cgraph_hooks ();
2541 if (ipcp_sources_pool)
2542 free_alloc_pool (ipcp_sources_pool);
2543 if (ipcp_values_pool)
2544 free_alloc_pool (ipcp_values_pool);
2545 }
2546
2547 /* Print the parameter descriptors of function NODE
2548 to F. */
2549
2550 void
2551 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2552 {
2553 int i, count;
2554 tree temp;
2555 struct ipa_node_params *info;
2556
2557 if (!node->analyzed)
2558 return;
2559 info = IPA_NODE_REF (node);
2560 fprintf (f, " function %s parameter descriptors:\n",
2561 cgraph_node_name (node));
2562 count = ipa_get_param_count (info);
2563 for (i = 0; i < count; i++)
2564 {
2565 temp = ipa_get_param (info, i);
2566 if (TREE_CODE (temp) == PARM_DECL)
2567 fprintf (f, " param %d : %s", i,
2568 (DECL_NAME (temp)
2569 ? (*lang_hooks.decl_printable_name) (temp, 2)
2570 : "(unnamed)"));
2571 if (ipa_is_param_used (info, i))
2572 fprintf (f, " used");
2573 fprintf (f, "\n");
2574 }
2575 }
2576
2577 /* Print ipa_tree_map data structures of all functions in the
2578 callgraph to F. */
2579
2580 void
2581 ipa_print_all_params (FILE * f)
2582 {
2583 struct cgraph_node *node;
2584
2585 fprintf (f, "\nFunction parameters:\n");
2586 FOR_EACH_FUNCTION (node)
2587 ipa_print_node_params (f, node);
2588 }
2589
2590 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2591
2592 VEC(tree, heap) *
2593 ipa_get_vector_of_formal_parms (tree fndecl)
2594 {
2595 VEC(tree, heap) *args;
2596 int count;
2597 tree parm;
2598
2599 count = count_formal_params (fndecl);
2600 args = VEC_alloc (tree, heap, count);
2601 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2602 VEC_quick_push (tree, args, parm);
2603
2604 return args;
2605 }
2606
2607 /* Return a heap allocated vector containing types of formal parameters of
2608 function type FNTYPE. */
2609
2610 static inline VEC(tree, heap) *
2611 get_vector_of_formal_parm_types (tree fntype)
2612 {
2613 VEC(tree, heap) *types;
2614 int count = 0;
2615 tree t;
2616
2617 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2618 count++;
2619
2620 types = VEC_alloc (tree, heap, count);
2621 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2622 VEC_quick_push (tree, types, TREE_VALUE (t));
2623
2624 return types;
2625 }
2626
2627 /* Modify the function declaration FNDECL and its type according to the plan in
2628 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2629 to reflect the actual parameters being modified which are determined by the
2630 base_index field. */
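
/* A hedged example of such a plan, in the style of what IPA-SRA produces
   (names are hypothetical): given

     int foo (struct S *p, int i);

   where only p->b and i are ever used, ADJUSTMENTS could contain a non-copy
   entry with base_index 0, the bit offset of field b and its scalar type,
   followed by a copy_param entry with base_index 1, turning the declaration
   into the equivalent of

     int foo (int b, int i);

   with the first parameter synthesized under SYNTH_PARM_PREFIX.  */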
2631
2632 void
2633 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2634 const char *synth_parm_prefix)
2635 {
2636 VEC(tree, heap) *oparms, *otypes;
2637 tree orig_type, new_type = NULL;
2638 tree old_arg_types, t, new_arg_types = NULL;
2639 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2640 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2641 tree new_reversed = NULL;
2642 bool care_for_types, last_parm_void;
2643
2644 if (!synth_parm_prefix)
2645 synth_parm_prefix = "SYNTH";
2646
2647 oparms = ipa_get_vector_of_formal_parms (fndecl);
2648 orig_type = TREE_TYPE (fndecl);
2649 old_arg_types = TYPE_ARG_TYPES (orig_type);
2650
2651 /* The following test is an ugly hack; some functions simply don't have any
2652 arguments in their type. This is probably a bug but well... */
2653 care_for_types = (old_arg_types != NULL_TREE);
2654 if (care_for_types)
2655 {
2656 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2657 == void_type_node);
2658 otypes = get_vector_of_formal_parm_types (orig_type);
2659 if (last_parm_void)
2660 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2661 else
2662 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2663 }
2664 else
2665 {
2666 last_parm_void = false;
2667 otypes = NULL;
2668 }
2669
2670 for (i = 0; i < len; i++)
2671 {
2672 struct ipa_parm_adjustment *adj;
2673 gcc_assert (link);
2674
2675 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2676 parm = VEC_index (tree, oparms, adj->base_index);
2677 adj->base = parm;
2678
2679 if (adj->copy_param)
2680 {
2681 if (care_for_types)
2682 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2683 adj->base_index),
2684 new_arg_types);
2685 *link = parm;
2686 link = &DECL_CHAIN (parm);
2687 }
2688 else if (!adj->remove_param)
2689 {
2690 tree new_parm;
2691 tree ptype;
2692
2693 if (adj->by_ref)
2694 ptype = build_pointer_type (adj->type);
2695 else
2696 ptype = adj->type;
2697
2698 if (care_for_types)
2699 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2700
2701 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2702 ptype);
2703 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2704
2705 DECL_ARTIFICIAL (new_parm) = 1;
2706 DECL_ARG_TYPE (new_parm) = ptype;
2707 DECL_CONTEXT (new_parm) = fndecl;
2708 TREE_USED (new_parm) = 1;
2709 DECL_IGNORED_P (new_parm) = 1;
2710 layout_decl (new_parm, 0);
2711
2712 adj->base = parm;
2713 adj->reduction = new_parm;
2714
2715 *link = new_parm;
2716
2717 link = &DECL_CHAIN (new_parm);
2718 }
2719 }
2720
2721 *link = NULL_TREE;
2722
2723 if (care_for_types)
2724 {
2725 new_reversed = nreverse (new_arg_types);
2726 if (last_parm_void)
2727 {
2728 if (new_reversed)
2729 TREE_CHAIN (new_arg_types) = void_list_node;
2730 else
2731 new_reversed = void_list_node;
2732 }
2733 }
2734
2735 /* Use copy_node to preserve as much as possible from the original type
2736 (debug info, attribute lists etc.). The exception is METHOD_TYPEs,
2737 which must have the THIS argument. When we are asked to remove it,
2738 we need to build a new FUNCTION_TYPE
2739 instead. */
2740 if (TREE_CODE (orig_type) != METHOD_TYPE
2741 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
2742 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
2743 {
2744 new_type = build_distinct_type_copy (orig_type);
2745 TYPE_ARG_TYPES (new_type) = new_reversed;
2746 }
2747 else
2748 {
2749 new_type
2750 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2751 new_reversed));
2752 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2753 DECL_VINDEX (fndecl) = NULL_TREE;
2754 }
2755
2756 /* When signature changes, we need to clear builtin info. */
2757 if (DECL_BUILT_IN (fndecl))
2758 {
2759 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2760 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2761 }
2762
2763 /* This is a new type, not a copy of an old type. Need to reassociate
2764 variants. We can handle everything except the main variant lazily. */
2765 t = TYPE_MAIN_VARIANT (orig_type);
2766 if (orig_type != t)
2767 {
2768 TYPE_MAIN_VARIANT (new_type) = t;
2769 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2770 TYPE_NEXT_VARIANT (t) = new_type;
2771 }
2772 else
2773 {
2774 TYPE_MAIN_VARIANT (new_type) = new_type;
2775 TYPE_NEXT_VARIANT (new_type) = NULL;
2776 }
2777
2778 TREE_TYPE (fndecl) = new_type;
2779 DECL_VIRTUAL_P (fndecl) = 0;
2780 if (otypes)
2781 VEC_free (tree, heap, otypes);
2782 VEC_free (tree, heap, oparms);
2783 }
2784
2785 /* Modify actual arguments of the function call STMT as indicated in
2786 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
2787 otherwise it must contain the corresponding call graph edge. */
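
/* Continuing the hypothetical IPA-SRA-style example given before
   ipa_modify_formal_parameters: under the same ADJUSTMENTS, a call

     foo (&s, 3);

   is rewritten below into the equivalent of

     foo (s.b, 3);

   i.e. the selected piece is loaded at the recorded offset and passed in
   place of the original aggregate argument.  */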
2788
2789 void
2790 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2791 ipa_parm_adjustment_vec adjustments)
2792 {
2793 VEC(tree, heap) *vargs;
2794 VEC(tree, gc) **debug_args = NULL;
2795 gimple new_stmt;
2796 gimple_stmt_iterator gsi;
2797 tree callee_decl;
2798 int i, len;
2799
2800 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2801 vargs = VEC_alloc (tree, heap, len);
2802 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2803
2804 gsi = gsi_for_stmt (stmt);
2805 for (i = 0; i < len; i++)
2806 {
2807 struct ipa_parm_adjustment *adj;
2808
2809 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2810
2811 if (adj->copy_param)
2812 {
2813 tree arg = gimple_call_arg (stmt, adj->base_index);
2814
2815 VEC_quick_push (tree, vargs, arg);
2816 }
2817 else if (!adj->remove_param)
2818 {
2819 tree expr, base, off;
2820 location_t loc;
2821
2822 /* When we create a new parameter out of the value of the old one, we can
2823 do the following kinds of transformations:
2824
2825 - A scalar passed by reference is converted to a scalar passed by
2826 value. (adj->by_ref is false and the type of the original
2827 actual argument is a pointer to a scalar).
2828
2829 - A part of an aggregate is passed instead of the whole aggregate.
2830 The part can be passed either by value or by reference, this is
2831 determined by value of adj->by_ref. Moreover, the code below
2832 handles both situations when the original aggregate is passed by
2833 value (its type is not a pointer) and when it is passed by
2834 reference (it is a pointer to an aggregate).
2835
2836 When the new argument is passed by reference (adj->by_ref is true)
2837 it must be a part of an aggregate and therefore we form it by
2838 simply taking the address of a reference inside the original
2839 aggregate. */
2840
2841 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2842 base = gimple_call_arg (stmt, adj->base_index);
2843 loc = EXPR_LOCATION (base);
2844
2845 if (TREE_CODE (base) != ADDR_EXPR
2846 && POINTER_TYPE_P (TREE_TYPE (base)))
2847 off = build_int_cst (adj->alias_ptr_type,
2848 adj->offset / BITS_PER_UNIT);
2849 else
2850 {
2851 HOST_WIDE_INT base_offset;
2852 tree prev_base;
2853
2854 if (TREE_CODE (base) == ADDR_EXPR)
2855 base = TREE_OPERAND (base, 0);
2856 prev_base = base;
2857 base = get_addr_base_and_unit_offset (base, &base_offset);
2858 /* Aggregate arguments can have non-invariant addresses. */
2859 if (!base)
2860 {
2861 base = build_fold_addr_expr (prev_base);
2862 off = build_int_cst (adj->alias_ptr_type,
2863 adj->offset / BITS_PER_UNIT);
2864 }
2865 else if (TREE_CODE (base) == MEM_REF)
2866 {
2867 off = build_int_cst (adj->alias_ptr_type,
2868 base_offset
2869 + adj->offset / BITS_PER_UNIT);
2870 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2871 off);
2872 base = TREE_OPERAND (base, 0);
2873 }
2874 else
2875 {
2876 off = build_int_cst (adj->alias_ptr_type,
2877 base_offset
2878 + adj->offset / BITS_PER_UNIT);
2879 base = build_fold_addr_expr (base);
2880 }
2881 }
2882
2883 if (!adj->by_ref)
2884 {
2885 tree type = adj->type;
2886 unsigned int align;
2887 unsigned HOST_WIDE_INT misalign;
2888
2889 get_pointer_alignment_1 (base, &align, &misalign);
2890 misalign += (double_int_sext (tree_to_double_int (off),
2891 TYPE_PRECISION (TREE_TYPE (off))).low
2892 * BITS_PER_UNIT);
2893 misalign = misalign & (align - 1);
2894 if (misalign != 0)
2895 align = (misalign & -misalign);
2896 if (align < TYPE_ALIGN (type))
2897 type = build_aligned_type (type, align);
2898 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2899 }
2900 else
2901 {
2902 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2903 expr = build_fold_addr_expr (expr);
2904 }
2905
2906 expr = force_gimple_operand_gsi (&gsi, expr,
2907 adj->by_ref
2908 || is_gimple_reg_type (adj->type),
2909 NULL, true, GSI_SAME_STMT);
2910 VEC_quick_push (tree, vargs, expr);
2911 }
2912 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2913 {
2914 unsigned int ix;
2915 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2916 gimple def_temp;
2917
2918 arg = gimple_call_arg (stmt, adj->base_index);
2919 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2920 {
2921 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2922 continue;
2923 arg = fold_convert_loc (gimple_location (stmt),
2924 TREE_TYPE (origin), arg);
2925 }
2926 if (debug_args == NULL)
2927 debug_args = decl_debug_args_insert (callee_decl);
2928 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2929 if (ddecl == origin)
2930 {
2931 ddecl = VEC_index (tree, *debug_args, ix + 1);
2932 break;
2933 }
2934 if (ddecl == NULL)
2935 {
2936 ddecl = make_node (DEBUG_EXPR_DECL);
2937 DECL_ARTIFICIAL (ddecl) = 1;
2938 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2939 DECL_MODE (ddecl) = DECL_MODE (origin);
2940
2941 VEC_safe_push (tree, gc, *debug_args, origin);
2942 VEC_safe_push (tree, gc, *debug_args, ddecl);
2943 }
2944 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2945 stmt);
2946 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2947 }
2948 }
2949
2950 if (dump_file && (dump_flags & TDF_DETAILS))
2951 {
2952 fprintf (dump_file, "replacing stmt:");
2953 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2954 }
2955
2956 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2957 VEC_free (tree, heap, vargs);
2958 if (gimple_call_lhs (stmt))
2959 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2960
2961 gimple_set_block (new_stmt, gimple_block (stmt));
2962 if (gimple_has_location (stmt))
2963 gimple_set_location (new_stmt, gimple_location (stmt));
2964 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2965 gimple_call_copy_flags (new_stmt, stmt);
2966
2967 if (dump_file && (dump_flags & TDF_DETAILS))
2968 {
2969 fprintf (dump_file, "with stmt:");
2970 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2971 fprintf (dump_file, "\n");
2972 }
2973 gsi_replace (&gsi, new_stmt, true);
2974 if (cs)
2975 cgraph_set_call_stmt (cs, new_stmt);
2976 update_ssa (TODO_update_ssa);
2977 free_dominance_info (CDI_DOMINATORS);
2978 }
2979
2980 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2981
2982 static bool
2983 index_in_adjustments_multiple_times_p (int base_index,
2984 ipa_parm_adjustment_vec adjustments)
2985 {
2986 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2987 bool one = false;
2988
2989 for (i = 0; i < len; i++)
2990 {
2991 struct ipa_parm_adjustment *adj;
2992 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2993
2994 if (adj->base_index == base_index)
2995 {
2996 if (one)
2997 return true;
2998 else
2999 one = true;
3000 }
3001 }
3002 return false;
3003 }
3004
3005
3006 /* Return adjustments that should have the same effect on function parameters
3007 and call arguments as if they were first changed according to adjustments in
3008 INNER and then by adjustments in OUTER. */
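
/* A small hypothetical example of the composition below: if INNER removes
   parameter 0 and copies parameter 1, while OUTER copies its single
   remaining parameter 0, the combined vector copies original parameter 1
   and still removes original parameter 0; OUTER's indices are translated
   back through the surviving entries of INNER, and INNER's removals are
   appended at the end.  */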
3009
3010 ipa_parm_adjustment_vec
3011 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3012 ipa_parm_adjustment_vec outer)
3013 {
3014 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
3015 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
3016 int removals = 0;
3017 ipa_parm_adjustment_vec adjustments, tmp;
3018
3019 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
3020 for (i = 0; i < inlen; i++)
3021 {
3022 struct ipa_parm_adjustment *n;
3023 n = &VEC_index (ipa_parm_adjustment_t, inner, i);
3024
3025 if (n->remove_param)
3026 removals++;
3027 else
3028 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
3029 }
3030
3031 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
3032 for (i = 0; i < outlen; i++)
3033 {
3034 struct ipa_parm_adjustment *r;
3035 struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
3036 outer, i);
3037 struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
3038 out->base_index);
3039
3040 gcc_assert (!in->remove_param);
3041 if (out->remove_param)
3042 {
3043 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3044 {
3045 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3046 memset (r, 0, sizeof (*r));
3047 r->remove_param = true;
3048 }
3049 continue;
3050 }
3051
3052 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3053 memset (r, 0, sizeof (*r));
3054 r->base_index = in->base_index;
3055 r->type = out->type;
3056
3057 /* FIXME: Create nonlocal value too. */
3058
3059 if (in->copy_param && out->copy_param)
3060 r->copy_param = true;
3061 else if (in->copy_param)
3062 r->offset = out->offset;
3063 else if (out->copy_param)
3064 r->offset = in->offset;
3065 else
3066 r->offset = in->offset + out->offset;
3067 }
3068
3069 for (i = 0; i < inlen; i++)
3070 {
3071 struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
3072 inner, i);
3073
3074 if (n->remove_param)
3075 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
3076 }
3077
3078 VEC_free (ipa_parm_adjustment_t, heap, tmp);
3079 return adjustments;
3080 }
3081
3082 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
3083 friendly way, assuming they are meant to be applied to FNDECL. */
3084
3085 void
3086 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3087 tree fndecl)
3088 {
3089 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3090 bool first = true;
3091 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
3092
3093 fprintf (file, "IPA param adjustments: ");
3094 for (i = 0; i < len; i++)
3095 {
3096 struct ipa_parm_adjustment *adj;
3097 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3098
3099 if (!first)
3100 fprintf (file, " ");
3101 else
3102 first = false;
3103
3104 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3105 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
3106 if (adj->base)
3107 {
3108 fprintf (file, ", base: ");
3109 print_generic_expr (file, adj->base, 0);
3110 }
3111 if (adj->reduction)
3112 {
3113 fprintf (file, ", reduction: ");
3114 print_generic_expr (file, adj->reduction, 0);
3115 }
3116 if (adj->new_ssa_base)
3117 {
3118 fprintf (file, ", new_ssa_base: ");
3119 print_generic_expr (file, adj->new_ssa_base, 0);
3120 }
3121
3122 if (adj->copy_param)
3123 fprintf (file, ", copy_param");
3124 else if (adj->remove_param)
3125 fprintf (file, ", remove_param");
3126 else
3127 fprintf (file, ", offset %li", (long) adj->offset);
3128 if (adj->by_ref)
3129 fprintf (file, ", by_ref");
3130 print_node_brief (file, ", type: ", adj->type, 0);
3131 fprintf (file, "\n");
3132 }
3133 VEC_free (tree, heap, parms);
3134 }
3135
3136 /* Stream out jump function JUMP_FUNC to OB. */
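
/* A note on the layout, as it can be read off the code below: first a uhwi
   with the jump function type, then the type-specific payload (trees
   streamed by reference, flags packed into bitpacks), then a uhwi count of
   aggregate items followed, when non-zero, by a by_ref bitpack and by the
   offset/value pair of each item.  ipa_read_jump_function below mirrors
   this order exactly.  */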
3137
3138 static void
3139 ipa_write_jump_function (struct output_block *ob,
3140 struct ipa_jump_func *jump_func)
3141 {
3142 struct ipa_agg_jf_item *item;
3143 struct bitpack_d bp;
3144 int i, count;
3145
3146 streamer_write_uhwi (ob, jump_func->type);
3147 switch (jump_func->type)
3148 {
3149 case IPA_JF_UNKNOWN:
3150 break;
3151 case IPA_JF_KNOWN_TYPE:
3152 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3153 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3154 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3155 break;
3156 case IPA_JF_CONST:
3157 stream_write_tree (ob, jump_func->value.constant, true);
3158 break;
3159 case IPA_JF_PASS_THROUGH:
3160 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3161 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3162 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3163 bp = bitpack_create (ob->main_stream);
3164 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3165 streamer_write_bitpack (&bp);
3166 break;
3167 case IPA_JF_ANCESTOR:
3168 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3169 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3170 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3171 bp = bitpack_create (ob->main_stream);
3172 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3173 streamer_write_bitpack (&bp);
3174 break;
3175 }
3176
3177 count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
3178 streamer_write_uhwi (ob, count);
3179 if (count)
3180 {
3181 bp = bitpack_create (ob->main_stream);
3182 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3183 streamer_write_bitpack (&bp);
3184 }
3185
3186 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
3187 {
3188 streamer_write_uhwi (ob, item->offset);
3189 stream_write_tree (ob, item->value, true);
3190 }
3191 }
3192
3193 /* Read in jump function JUMP_FUNC from IB. */
3194
3195 static void
3196 ipa_read_jump_function (struct lto_input_block *ib,
3197 struct ipa_jump_func *jump_func,
3198 struct data_in *data_in)
3199 {
3200 struct bitpack_d bp;
3201 int i, count;
3202
3203 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3204 switch (jump_func->type)
3205 {
3206 case IPA_JF_UNKNOWN:
3207 break;
3208 case IPA_JF_KNOWN_TYPE:
3209 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3210 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3211 jump_func->value.known_type.component_type = stream_read_tree (ib,
3212 data_in);
3213 break;
3214 case IPA_JF_CONST:
3215 jump_func->value.constant = stream_read_tree (ib, data_in);
3216 break;
3217 case IPA_JF_PASS_THROUGH:
3218 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3219 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3220 jump_func->value.pass_through.operation
3221 = (enum tree_code) streamer_read_uhwi (ib);
3222 bp = streamer_read_bitpack (ib);
3223 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3224 break;
3225 case IPA_JF_ANCESTOR:
3226 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3227 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3228 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3229 bp = streamer_read_bitpack (ib);
3230 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3231 break;
3232 }
3233
3234 count = streamer_read_uhwi (ib);
3235 jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
3236 if (count)
3237 {
3238 bp = streamer_read_bitpack (ib);
3239 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3240 }
3241 for (i = 0; i < count; i++)
3242 {
3243 struct ipa_agg_jf_item *item = VEC_quick_push (ipa_agg_jf_item_t,
3244 jump_func->agg.items, NULL);
3245
3246 item->offset = streamer_read_uhwi (ib);
3247 item->value = stream_read_tree (ib, data_in);
3248 }
3249 }
3250
3251 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3252 relevant to indirect inlining to OB. */
3253
3254 static void
3255 ipa_write_indirect_edge_info (struct output_block *ob,
3256 struct cgraph_edge *cs)
3257 {
3258 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3259 struct bitpack_d bp;
3260
3261 streamer_write_hwi (ob, ii->param_index);
3262 streamer_write_hwi (ob, ii->offset);
3263 bp = bitpack_create (ob->main_stream);
3264 bp_pack_value (&bp, ii->polymorphic, 1);
3265 bp_pack_value (&bp, ii->agg_contents, 1);
3266 bp_pack_value (&bp, ii->by_ref, 1);
3267 streamer_write_bitpack (&bp);
3268
3269 if (ii->polymorphic)
3270 {
3271 streamer_write_hwi (ob, ii->otr_token);
3272 stream_write_tree (ob, ii->otr_type, true);
3273 }
3274 }
3275
3276 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3277 relevant to indirect inlining from IB. */
3278
3279 static void
3280 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3281 struct data_in *data_in ATTRIBUTE_UNUSED,
3282 struct cgraph_edge *cs)
3283 {
3284 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3285 struct bitpack_d bp;
3286
3287 ii->param_index = (int) streamer_read_hwi (ib);
3288 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3289 bp = streamer_read_bitpack (ib);
3290 ii->polymorphic = bp_unpack_value (&bp, 1);
3291 ii->agg_contents = bp_unpack_value (&bp, 1);
3292 ii->by_ref = bp_unpack_value (&bp, 1);
3293 if (ii->polymorphic)
3294 {
3295 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3296 ii->otr_type = stream_read_tree (ib, data_in);
3297 }
3298 }
3299
3300 /* Stream out NODE info to OB. */
3301
3302 static void
3303 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3304 {
3305 int node_ref;
3306 lto_symtab_encoder_t encoder;
3307 struct ipa_node_params *info = IPA_NODE_REF (node);
3308 int j;
3309 struct cgraph_edge *e;
3310 struct bitpack_d bp;
3311
3312 encoder = ob->decl_state->symtab_node_encoder;
3313 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3314 streamer_write_uhwi (ob, node_ref);
3315
3316 bp = bitpack_create (ob->main_stream);
3317 gcc_assert (info->uses_analysis_done
3318 || ipa_get_param_count (info) == 0);
3319 gcc_assert (!info->node_enqueued);
3320 gcc_assert (!info->ipcp_orig_node);
3321 for (j = 0; j < ipa_get_param_count (info); j++)
3322 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3323 streamer_write_bitpack (&bp);
3324 for (e = node->callees; e; e = e->next_callee)
3325 {
3326 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3327
3328 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3329 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3330 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3331 }
3332 for (e = node->indirect_calls; e; e = e->next_callee)
3333 {
3334 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3335
3336 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3337 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3338 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3339 ipa_write_indirect_edge_info (ob, e);
3340 }
3341 }
3342
3343 /* Stream in NODE info from IB. */
3344
3345 static void
3346 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3347 struct data_in *data_in)
3348 {
3349 struct ipa_node_params *info = IPA_NODE_REF (node);
3350 int k;
3351 struct cgraph_edge *e;
3352 struct bitpack_d bp;
3353
3354 ipa_initialize_node_params (node);
3355
3356 bp = streamer_read_bitpack (ib);
3357 if (ipa_get_param_count (info) != 0)
3358 info->uses_analysis_done = true;
3359 info->node_enqueued = false;
3360 for (k = 0; k < ipa_get_param_count (info); k++)
3361 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3362 for (e = node->callees; e; e = e->next_callee)
3363 {
3364 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3365 int count = streamer_read_uhwi (ib);
3366
3367 if (!count)
3368 continue;
3369 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
3370
3371 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3372 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3373 }
3374 for (e = node->indirect_calls; e; e = e->next_callee)
3375 {
3376 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3377 int count = streamer_read_uhwi (ib);
3378
3379 if (count)
3380 {
3381 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
3382 count);
3383 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3384 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3385 data_in);
3386 }
3387 ipa_read_indirect_edge_info (ib, data_in, e);
3388 }
3389 }
3390
3391 /* Write jump functions of all analyzed functions in the current partition. */
3392
3393 void
3394 ipa_prop_write_jump_functions (void)
3395 {
3396 struct cgraph_node *node;
3397 struct output_block *ob;
3398 unsigned int count = 0;
3399 lto_symtab_encoder_iterator lsei;
3400 lto_symtab_encoder_t encoder;
3401
3402
3403 if (!ipa_node_params_vector)
3404 return;
3405
3406 ob = create_output_block (LTO_section_jump_functions);
3407 encoder = ob->decl_state->symtab_node_encoder;
3408 ob->cgraph_node = NULL;
3409 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3410 lsei_next_function_in_partition (&lsei))
3411 {
3412 node = lsei_cgraph_node (lsei);
3413 if (cgraph_function_with_gimple_body_p (node)
3414 && IPA_NODE_REF (node) != NULL)
3415 count++;
3416 }
3417
3418 streamer_write_uhwi (ob, count);
3419
3420 /* Process all of the functions. */
3421 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3422 lsei_next_function_in_partition (&lsei))
3423 {
3424 node = lsei_cgraph_node (lsei);
3425 if (cgraph_function_with_gimple_body_p (node)
3426 && IPA_NODE_REF (node) != NULL)
3427 ipa_write_node_info (ob, node);
3428 }
3429 streamer_write_char_stream (ob->main_stream, 0);
3430 produce_asm (ob, NULL);
3431 destroy_output_block (ob);
3432 }
3433
3434 /* Read section in file FILE_DATA of length LEN with data DATA. */
3435
3436 static void
3437 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3438 size_t len)
3439 {
3440 const struct lto_function_header *header =
3441 (const struct lto_function_header *) data;
3442 const int cfg_offset = sizeof (struct lto_function_header);
3443 const int main_offset = cfg_offset + header->cfg_size;
3444 const int string_offset = main_offset + header->main_size;
3445 struct data_in *data_in;
3446 struct lto_input_block ib_main;
3447 unsigned int i;
3448 unsigned int count;
3449
3450 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3451 header->main_size);
3452
3453 data_in =
3454 lto_data_in_create (file_data, (const char *) data + string_offset,
3455 header->string_size, NULL);
3456 count = streamer_read_uhwi (&ib_main);
3457
3458 for (i = 0; i < count; i++)
3459 {
3460 unsigned int index;
3461 struct cgraph_node *node;
3462 lto_symtab_encoder_t encoder;
3463
3464 index = streamer_read_uhwi (&ib_main);
3465 encoder = file_data->symtab_node_encoder;
3466 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3467 gcc_assert (node->analyzed);
3468 ipa_read_node_info (&ib_main, node, data_in);
3469 }
3470 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3471 len);
3472 lto_data_in_delete (data_in);
3473 }
3474
3475 /* Read ipcp jump functions. */
3476
3477 void
3478 ipa_prop_read_jump_functions (void)
3479 {
3480 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3481 struct lto_file_decl_data *file_data;
3482 unsigned int j = 0;
3483
3484 ipa_check_create_node_params ();
3485 ipa_check_create_edge_args ();
3486 ipa_register_cgraph_hooks ();
3487
3488 while ((file_data = file_data_vec[j++]))
3489 {
3490 size_t len;
3491 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3492
3493 if (data)
3494 ipa_prop_read_section (file_data, data, len);
3495 }
3496 }
3497
3498 /* After merging units, we can get a mismatch in argument counts.
3499 Decl merging might also have rendered parameter lists obsolete.
3500 Also compute called_with_variable_arg info. */
3501
3502 void
3503 ipa_update_after_lto_read (void)
3504 {
3505 struct cgraph_node *node;
3506
3507 ipa_check_create_node_params ();
3508 ipa_check_create_edge_args ();
3509
3510 FOR_EACH_DEFINED_FUNCTION (node)
3511 if (node->analyzed)
3512 ipa_initialize_node_params (node);
3513 }