/* Interprocedural analyses.
   Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "gimple.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"


/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the edge argument infos are actually stored.  */
VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  int i, count;

  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    if (ipa_get_param (info, i) == ptree)
      return i;

  return -1;
}

/* Populate the param_decl field in parameter descriptors of INFO that
   corresponds to NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          struct ipa_node_params *info)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      VEC_index (ipa_param_descriptor_t,
                 info->descriptors, param_num).decl = parm;
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors)
    {
      int param_count;

      param_count = count_formal_params (node->symbol.decl);
      if (param_count)
        {
          VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
                                 info->descriptors, param_count);
          ipa_populate_param_decls (node, info);
        }
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   tree_code_name[(int)
                                  jump_func->value.pass_through.operation]);
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
                            j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_low_cst (TYPE_SIZE (item->value), 1));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  int i;

  fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (cgraph_node_name (node)), node->uid,
               xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      if (cs->call_stmt)
        {
          fprintf (f, " indirect callsite %d for stmt ", i);
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, " indirect callsite %d :\n", i);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Return the expression tree EXP unshared and with location stripped off.  */

static tree
prune_expression_for_jf (tree exp)
{
  if (EXPR_P (exp))
    {
      exp = unshare_expr (exp);
      SET_EXPR_LOCATION (exp, UNKNOWN_LOCATION);
    }
  return exp;
}
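
/* Illustrative note (not part of the original sources): jump functions
   outlive the bodies they were built from and may be copied between edges
   or streamed for LTO, so a tree taken from the IL must not share nodes
   with the function body.  A hedged usage sketch:

     tree op = gimple_assign_rhs2 (stmt);   <- e.g. an ADDR_EXPR operand
     op = prune_expression_for_jf (op);     <- private copy, no location

   After this, later modification of STMT cannot corrupt the stored jump
   function and dumps do not show stale source locations.  */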

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant = prune_expression_for_jf (constant);
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = prune_expression_for_jf (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
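
/* For illustration only (hypothetical C++ source, not taken from GCC):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   The compiled B::B () first calls A::A () (section 1 above), then stores
   the address of B's virtual table into this->_vptr (section 2), and only
   then runs the user-written constructor body (section 3).  */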

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */
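
/* A hedged example of a statement this is meant to match (exact GIMPLE
   spelling differs between GCC versions):

     this_2(D)->_vptr.A = &_ZTV1B[2];

   The RHS strips to the virtual table VAR_DECL _ZTV1B, which is
   DECL_VIRTUAL_P, and its DECL_CONTEXT is the class type B that gets
   returned.  */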

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}


/* Like detect_type_change but with extra argument COMP_TYPE which will become
   the component type part of new JFUNC if a dynamic type change is detected
   and the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
                      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
   looking for assignments to its virtual table pointer.  If it is, return true
   and fill in the jump function JFUNC with relevant type information or set it
   to unknown.  ARG is the object itself (not a pointer to it, unless
   dereferenced).  BASE is the base of the memory access as returned by
   get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  comp_type = TREE_TYPE (TREE_TYPE (arg));
  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */
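
/* For instance (illustrative only), given the GIMPLE

     a.0_2 = a;

   where A is the first formal parameter, this returns 0, provided the walk
   over virtual definitions proves A has not been written to before STMT.  */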

static int
load_from_unmodified_param (struct ipa_node_params *info,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index (info, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */
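
/* A sketch of the two recognized shapes (hypothetical GIMPLE):

     x_1 = s.fld;          <- S is an aggregate PARM_DECL:
                              *INDEX_P = index of S, *BY_REF_P = false
     y_2 = p_3(D)->fld;    <- P is a pointer PARM_DECL:
                              *INDEX_P = index of P, *BY_REF_P = true

   In both cases *OFFSET_P receives the offset of FLD within the aggregate,
   in bits.  */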

static bool
ipa_load_from_parm_agg_1 (struct ipa_node_params *info,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index (info, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index (info, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (info, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info, NULL, stmt, op, index_p, offset_p,
                                   by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters accessed in
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt)
               && !detect_type_change_ssa (tc_ssa, call, jfunc))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm));
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */
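
/* For example (illustrative only): when the address of an automatically
   allocated object is passed,

     struct B b;
     ...
     foo (&b);

   OP strips to the VAR_DECL B and, unless a dynamic type change before CALL
   is detected, the jump function records base type B at offset 0 with
   TREE_TYPE (op) as the component type.  */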

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
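
/* The layout being matched is the usual C++ ABI representation of a
   pointer to member function, roughly equivalent to this hypothetical C:

     struct ptrmemfunc
     {
       void (*__pfn) (void);   <- pointer to the method, METHOD_TYPE
       long __delta;           <- this-pointer adjustment, integral
     };
*/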

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */
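
/* A minimal sketch of input this recognizes (hypothetical GIMPLE):

     s.i = 42;
     s.f = 1.5e+0;
     foo (&s);

   This would yield two ipa_agg_jf_item entries holding the offsets of
   fields I and F relative to S and the two constants, with by_ref set
   because a pointer to the aggregate is what is actually passed.  */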

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_low_cst (type_size, 1);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        break;

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              item.value = prune_expression_for_jf (list->constant);
              VEC_quick_push (ipa_agg_jf_item_t, jfunc->agg.items, item);
            }
          list = list->next;
        }
    }
}

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             loop.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0
                  && !detect_type_change_ssa (arg, call, jfunc))
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (TREE_TYPE (arg))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->analyzed && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */
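
/* The load shapes being matched are those shown in the larger example
   further below (exact GIMPLE spelling is version-dependent), e.g.:

     f$__delta_5 = f.__delta;
     f$__pfn_24 = MEM[(struct  *)&f + 4B];
*/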

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
                                struct ipa_node_params *info,
                                struct param_analysis_info *parms_ainfo,
                                gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (node, index, call);
      return;
    }

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info, parms_ainfo, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
    }

  return;
}

/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */
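
/* A sketch of a matched virtual call (hypothetical GIMPLE, spelling
   approximate):

     OBJ_TYPE_REF(_3;obj_2(D)->2) (obj_2(D));

   When OBJ_2(D) is the default definition of a PARM_DECL, the indirect
   edge is annotated with that parameter's index, the OBJ_TYPE_REF token
   (here 2) and the static type of the object, which later passes can use
   for devirtualization.  */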

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
                               struct ipa_node_params *info, gimple call,
                               tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
        return;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
        return;
    }

  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}

/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct cgraph_node *node,
                       struct ipa_node_params *info,
                       struct param_analysis_info *parms_ainfo, gimple call)
{
  tree target = gimple_call_fn (call);

  if (!target)
    return;
  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
  else if (TREE_CODE (target) == OBJ_TYPE_REF)
    ipa_analyze_virtual_call_uses (node, info, call, target);
}
1822
1823
1824 /* Analyze the call statement STMT with respect to formal parameters (described
1825    in INFO) of the caller given by NODE.  Currently it only checks whether formal
1826    parameters are called.  PARMS_AINFO is a pointer to a vector containing
1827 intermediate information about each formal parameter. */
1828
1829 static void
1830 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1831 struct param_analysis_info *parms_ainfo, gimple stmt)
1832 {
1833 if (is_gimple_call (stmt))
1834 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1835 }
1836
1837 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
1838    taken addresses alike.  If OP is a parameter declaration, mark it as
1839    used in the info structure passed in DATA.  */
1840
1841 static bool
1842 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1843 tree op, void *data)
1844 {
1845 struct ipa_node_params *info = (struct ipa_node_params *) data;
1846
1847 op = get_base_address (op);
1848 if (op
1849 && TREE_CODE (op) == PARM_DECL)
1850 {
1851 int index = ipa_get_param_decl_index (info, op);
1852 gcc_assert (index >= 0);
1853 ipa_set_param_used (info, index, true);
1854 }
1855
1856 return false;
1857 }
1858
1859 /* Scan the function body of NODE and inspect the uses of formal parameters.
1860 Store the findings in various structures of the associated ipa_node_params
1861 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1862 vector containing intermediate information about each formal parameter. */
1863
1864 static void
1865 ipa_analyze_params_uses (struct cgraph_node *node,
1866 struct param_analysis_info *parms_ainfo)
1867 {
1868 tree decl = node->symbol.decl;
1869 basic_block bb;
1870 struct function *func;
1871 gimple_stmt_iterator gsi;
1872 struct ipa_node_params *info = IPA_NODE_REF (node);
1873 int i;
1874
1875 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1876 return;
1877
1878 for (i = 0; i < ipa_get_param_count (info); i++)
1879 {
1880 tree parm = ipa_get_param (info, i);
1881 tree ddef;
1882 /* For SSA regs see if parameter is used. For non-SSA we compute
1883 the flag during modification analysis. */
1884 if (is_gimple_reg (parm)
1885 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1886 parm)) != NULL_TREE
1887 && !has_zero_uses (ddef))
1888 ipa_set_param_used (info, i, true);
1889 }
1890
1891 func = DECL_STRUCT_FUNCTION (decl);
1892 FOR_EACH_BB_FN (bb, func)
1893 {
1894 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1895 {
1896 gimple stmt = gsi_stmt (gsi);
1897
1898 if (is_gimple_debug (stmt))
1899 continue;
1900
1901 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1902 walk_stmt_load_store_addr_ops (stmt, info,
1903 visit_ref_for_mod_analysis,
1904 visit_ref_for_mod_analysis,
1905 visit_ref_for_mod_analysis);
1906 }
1907 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1908 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1909 visit_ref_for_mod_analysis,
1910 visit_ref_for_mod_analysis,
1911 visit_ref_for_mod_analysis);
1912 }
1913
1914 info->uses_analysis_done = 1;
1915 }
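
/* A small made-up example of the effect of the analysis above: in

     int f (int a, int b) { return a; }

   the default definition of A has uses, so A is marked as used, while B,
   whose default definition has zero uses, stays unused; non-SSA parameters
   are instead caught by the visit_ref_for_mod_analysis callbacks when their
   memory or address is touched.  */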
1916
1917 /* Initialize the array describing properties of formal parameters
1918 of NODE, analyze their uses and compute jump functions associated
1919 with actual arguments of calls from within NODE. */
1920
1921 void
1922 ipa_analyze_node (struct cgraph_node *node)
1923 {
1924 struct ipa_node_params *info;
1925 struct param_analysis_info *parms_ainfo;
1926 int i, param_count;
1927
1928 ipa_check_create_node_params ();
1929 ipa_check_create_edge_args ();
1930 info = IPA_NODE_REF (node);
1931 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1932 current_function_decl = node->symbol.decl;
1933 ipa_initialize_node_params (node);
1934
1935 param_count = ipa_get_param_count (info);
1936 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1937 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1938
1939 ipa_analyze_params_uses (node, parms_ainfo);
1940 ipa_compute_jump_functions (node, parms_ainfo);
1941
1942 for (i = 0; i < param_count; i++)
1943 {
1944 if (parms_ainfo[i].parm_visited_statements)
1945 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1946 if (parms_ainfo[i].pt_visited_statements)
1947 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1948 }
1949
1950 current_function_decl = NULL;
1951 pop_cfun ();
1952 }
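
/* A minimal sketch of how this entry point is usually driven by an IPA pass
   (illustrative caller code, not part of this file):

     struct cgraph_node *node;

     FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
       ipa_analyze_node (node);

   After that, IPA_NODE_REF (node) provides parameter descriptors and use
   flags, and IPA_EDGE_REF (cs) provides jump functions of each call CS.  */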
1953
1954
1955 /* Update the jump function DST when the call graph edge corresponding to SRC
1956    is being inlined, knowing that DST is of type ancestor and SRC of known
1957    type.  */
1958
1959 static void
1960 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1961 struct ipa_jump_func *dst)
1962 {
1963 HOST_WIDE_INT combined_offset;
1964 tree combined_type;
1965
1966 combined_offset = ipa_get_jf_known_type_offset (src)
1967 + ipa_get_jf_ancestor_offset (dst);
1968 combined_type = ipa_get_jf_ancestor_type (dst);
1969
1970 ipa_set_jf_known_type (dst, combined_offset,
1971 ipa_get_jf_known_type_base_type (src),
1972 combined_type);
1973 }
1974
1975 /* Update the jump functions associated with call graph edge E when the call
1976 graph edge CS is being inlined, assuming that E->caller is already (possibly
1977 indirectly) inlined into CS->callee and that E has not been inlined. */
1978
1979 static void
1980 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1981 struct cgraph_edge *e)
1982 {
1983 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1984 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1985 int count = ipa_get_cs_argument_count (args);
1986 int i;
1987
1988 for (i = 0; i < count; i++)
1989 {
1990 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1991
1992 if (dst->type == IPA_JF_ANCESTOR)
1993 {
1994 struct ipa_jump_func *src;
1995 int dst_fid = dst->value.ancestor.formal_id;
1996
1997 /* A variable number of arguments can cause havoc if we try to access
1998    an argument that does not exist on the inlined edge.  So make sure
1999    we don't.  */
2000 if (dst_fid >= ipa_get_cs_argument_count (top))
2001 {
2002 dst->type = IPA_JF_UNKNOWN;
2003 continue;
2004 }
2005
2006 src = ipa_get_ith_jump_func (top, dst_fid);
2007
2008 if (src->agg.items
2009 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2010 {
2011 struct ipa_agg_jf_item *item;
2012 int j;
2013
2014 /* Currently we do not produce clobber aggregate jump functions,
2015 replace with merging when we do. */
2016 gcc_assert (!dst->agg.items);
2017
2018 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
2019 dst->agg.by_ref = src->agg.by_ref;
2020 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
2021 item->offset -= dst->value.ancestor.offset;
2022 }
2023
2024 if (src->type == IPA_JF_KNOWN_TYPE)
2025 combine_known_type_and_ancestor_jfs (src, dst);
2026 else if (src->type == IPA_JF_PASS_THROUGH
2027 && src->value.pass_through.operation == NOP_EXPR)
2028 {
2029 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2030 dst->value.ancestor.agg_preserved &=
2031 src->value.pass_through.agg_preserved;
2032 }
2033 else if (src->type == IPA_JF_ANCESTOR)
2034 {
2035 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2036 dst->value.ancestor.offset += src->value.ancestor.offset;
2037 dst->value.ancestor.agg_preserved &=
2038 src->value.ancestor.agg_preserved;
2039 }
2040 else
2041 dst->type = IPA_JF_UNKNOWN;
2042 }
2043 else if (dst->type == IPA_JF_PASS_THROUGH)
2044 {
2045 struct ipa_jump_func *src;
2046 /* We must check the range because of calls with a variable number of
2047    arguments, and we cannot combine jump functions with operations.  */
2048 if (dst->value.pass_through.operation == NOP_EXPR
2049 && (dst->value.pass_through.formal_id
2050 < ipa_get_cs_argument_count (top)))
2051 {
2052 bool agg_p;
2053 int dst_fid = dst->value.pass_through.formal_id;
2054 src = ipa_get_ith_jump_func (top, dst_fid);
2055 agg_p = dst->value.pass_through.agg_preserved;
2056
2057 dst->type = src->type;
2058 dst->value = src->value;
2059
2060 if (src->agg.items
2061 && (agg_p || !src->agg.by_ref))
2062 {
2063 /* Currently we do not produce clobber aggregate jump
2064 functions, replace with merging when we do. */
2065 gcc_assert (!dst->agg.items);
2066
2067 dst->agg.by_ref = src->agg.by_ref;
2068 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
2069 src->agg.items);
2070 }
2071
2072 if (!agg_p)
2073 {
2074 if (dst->type == IPA_JF_PASS_THROUGH)
2075 dst->value.pass_through.agg_preserved = false;
2076 else if (dst->type == IPA_JF_ANCESTOR)
2077 dst->value.ancestor.agg_preserved = false;
2078 }
2079 }
2080 else
2081 dst->type = IPA_JF_UNKNOWN;
2082 }
2083 }
2084 }
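
/* A worked example for the ancestor case above, with made-up numbers:
   suppose DST is an ancestor jump function with offset 64 referring to
   formal 2, and SRC, the jump function of argument 2 on the inlined edge,
   is a known-type function with offset 32.  The combination produces a
   known-type function with offset 32 + 64 = 96, SRC's base type and DST's
   ancestor type; offsets simply accumulate along the chain, and an unknown
   SRC collapses DST to IPA_JF_UNKNOWN.  */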
2085
2086 /* If TARGET is an addr_expr of a function declaration, make it the destination
2087 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2088
2089 struct cgraph_edge *
2090 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2091 {
2092 struct cgraph_node *callee;
2093
2094 if (TREE_CODE (target) == ADDR_EXPR)
2095 target = TREE_OPERAND (target, 0);
2096 if (TREE_CODE (target) != FUNCTION_DECL)
2097 return NULL;
2098 callee = cgraph_get_node (target);
2099 if (!callee)
2100 return NULL;
2101 ipa_check_create_node_params ();
2102
2103 /* We cannot make edges to inline clones.  It is a bug that someone removed
2104 the cgraph node too early. */
2105 gcc_assert (!callee->global.inlined_to);
2106
2107 cgraph_make_edge_direct (ie, callee);
2108 if (dump_file)
2109 {
2110 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2111 "(%s/%i -> %s/%i), for stmt ",
2112 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2113 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2114 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2115 if (ie->call_stmt)
2116 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2117 else
2118 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2119 }
2120 callee = cgraph_function_or_thunk_node (callee, NULL);
2121
2122 return ie;
2123 }
2124
2125 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2126    return NULL if there is none.  BY_REF specifies whether the value has to
2127 be passed by reference or by value. */
2128
2129 tree
2130 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2131 HOST_WIDE_INT offset, bool by_ref)
2132 {
2133 struct ipa_agg_jf_item *item;
2134 int i;
2135
2136 if (by_ref != agg->by_ref)
2137 return NULL;
2138
2139 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2140 {
2141 if (item->offset == offset)
2142 {
2143 /* Currently we do not have clobber values, return NULL for them once
2144 we do. */
2145 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2146 return item->value;
2147 }
2148 else if (item->offset > offset)
2149 return NULL;
2150 }
2151 return NULL;
2152 }
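
/* For instance, if AGG matches the requested BY_REF and holds items at bit
   offsets 0 and 64, a query for offset 32 walks past the item at 0 and
   returns NULL upon seeing the item at 64; the early return above is only
   valid because the items are kept sorted by offset.  */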
2153
2154 /* Try to find a destination for indirect edge IE that corresponds to a simple
2155 call or a call of a member function pointer and where the destination is a
2156 pointer formal parameter described by jump function JFUNC. If it can be
2157 determined, return the newly direct edge, otherwise return NULL. */
2158
2159 static struct cgraph_edge *
2160 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2161 struct ipa_jump_func *jfunc)
2162 {
2163 tree target;
2164
2165 if (ie->indirect_info->agg_contents)
2166 {
2167 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2168 ie->indirect_info->offset,
2169 ie->indirect_info->by_ref);
2170 if (!target)
2171 return NULL;
2172 }
2173 else
2174 {
2175 if (jfunc->type != IPA_JF_CONST)
2176 return NULL;
2177 target = ipa_get_jf_constant (jfunc);
2178 }
2179 return ipa_make_edge_direct_to_target (ie, target);
2180 }
2181
2182 /* Try to find a destination for indirect edge IE that corresponds to a
2183 virtual call based on a formal parameter which is described by jump
2184 function JFUNC and if it can be determined, make it direct and return the
2185 direct edge. Otherwise, return NULL. */
2186
2187 static struct cgraph_edge *
2188 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2189 struct ipa_jump_func *jfunc)
2190 {
2191 tree binfo, target;
2192
2193 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2194 return NULL;
2195
2196 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2197 gcc_checking_assert (binfo);
2198 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2199 + ie->indirect_info->offset,
2200 ie->indirect_info->otr_type);
2201 if (binfo)
2202 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2203 binfo);
2204 else
2205 return NULL;
2206
2207 if (target)
2208 return ipa_make_edge_direct_to_target (ie, target);
2209 else
2210 return NULL;
2211 }
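
/* An illustrative scenario: the jump function says the argument is known to
   be of type B at offset 0 while the call expects ii->otr_type A.
   get_binfo_at_offset walks B's binfo to the A subobject at the combined
   offset, gimple_get_virt_method_for_binfo resolves ii->otr_token in it
   (typically to B's override of the method), and the edge becomes
   direct.  */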
2212
2213 /* Update the param called notes associated with NODE when CS is being inlined,
2214 assuming NODE is (potentially indirectly) inlined into CS->callee.
2215 Moreover, if the callee is discovered to be constant, create a new cgraph
2216 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2217    unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
2218
2219 static bool
2220 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2221 struct cgraph_node *node,
2222 VEC (cgraph_edge_p, heap) **new_edges)
2223 {
2224 struct ipa_edge_args *top;
2225 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2226 bool res = false;
2227
2228 ipa_check_create_edge_args ();
2229 top = IPA_EDGE_REF (cs);
2230
2231 for (ie = node->indirect_calls; ie; ie = next_ie)
2232 {
2233 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2234 struct ipa_jump_func *jfunc;
2235 int param_index;
2236
2237 next_ie = ie->next_callee;
2238
2239 if (ici->param_index == -1)
2240 continue;
2241
2242       /* We must check the range because of calls with a variable number of arguments.  */
2243 if (ici->param_index >= ipa_get_cs_argument_count (top))
2244 {
2245 ici->param_index = -1;
2246 continue;
2247 }
2248
2249 param_index = ici->param_index;
2250 jfunc = ipa_get_ith_jump_func (top, param_index);
2251 if (jfunc->type == IPA_JF_PASS_THROUGH
2252 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2253 {
2254 if (ici->agg_contents
2255 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2256 ici->param_index = -1;
2257 else
2258 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2259 }
2260 else if (jfunc->type == IPA_JF_ANCESTOR)
2261 {
2262 if (ici->agg_contents
2263 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2264 ici->param_index = -1;
2265 else
2266 {
2267 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2268 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2269 }
2270 }
2271 else
2272 /* Either we can find a destination for this edge now or never. */
2273 ici->param_index = -1;
2274
2275 if (!flag_indirect_inlining)
2276 continue;
2277
2278 if (ici->polymorphic)
2279 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2280 else
2281 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2282
2283 if (new_direct_edge)
2284 {
2285 new_direct_edge->indirect_inlining_edge = 1;
2286 if (new_direct_edge->call_stmt)
2287 new_direct_edge->call_stmt_cannot_inline_p
2288 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2289 new_direct_edge->callee->symbol.decl);
2290 if (new_edges)
2291 {
2292 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
2293 new_direct_edge);
2294 top = IPA_EDGE_REF (cs);
2295 res = true;
2296 }
2297 }
2298 }
2299
2300 return res;
2301 }
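
/* For example, with hypothetical indices: if an indirect call in NODE
   depended on NODE's parameter 2 and the jump function of argument 2 on the
   inlined edge is a plain pass-through of the new caller's parameter 0, the
   call is re-described as depending on parameter 0; an ancestor jump
   function additionally shifts ii->offset by its own offset.  */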
2302
2303 /* Recursively traverse subtree of NODE (including node) made of inlined
2304 cgraph_edges when CS has been inlined and invoke
2305 update_indirect_edges_after_inlining on all nodes and
2306 update_jump_functions_after_inlining on all non-inlined edges that lead out
2307 of this subtree. Newly discovered indirect edges will be added to
2308    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
2309    created.  */
2310
2311 static bool
2312 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2313 struct cgraph_node *node,
2314 VEC (cgraph_edge_p, heap) **new_edges)
2315 {
2316 struct cgraph_edge *e;
2317 bool res;
2318
2319 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2320
2321 for (e = node->callees; e; e = e->next_callee)
2322 if (!e->inline_failed)
2323 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2324 else
2325 update_jump_functions_after_inlining (cs, e);
2326 for (e = node->indirect_calls; e; e = e->next_callee)
2327 update_jump_functions_after_inlining (cs, e);
2328
2329 return res;
2330 }
2331
2332 /* Update jump functions and call note functions on inlining the call site CS.
2333 CS is expected to lead to a node already cloned by
2334 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2335    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
2336    created.  */
2337
2338 bool
2339 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2340 VEC (cgraph_edge_p, heap) **new_edges)
2341 {
2342 bool changed;
2343 /* Do nothing if the preparation phase has not been carried out yet
2344 (i.e. during early inlining). */
2345 if (!ipa_node_params_vector)
2346 return false;
2347 gcc_assert (ipa_edge_args_vector);
2348
2349 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2350
2351 /* We do not keep jump functions of inlined edges up to date. Better to free
2352 them so we do not access them accidentally. */
2353 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2354 return changed;
2355 }
2356
2357 /* Frees all dynamically allocated structures that the argument info points
2358 to. */
2359
2360 void
2361 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2362 {
2363 if (args->jump_functions)
2364 ggc_free (args->jump_functions);
2365
2366 memset (args, 0, sizeof (*args));
2367 }
2368
2369 /* Free all ipa_edge_args structures.  */
2370
2371 void
2372 ipa_free_all_edge_args (void)
2373 {
2374 int i;
2375 struct ipa_edge_args *args;
2376
2377 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2378 ipa_free_edge_args_substructures (args);
2379
2380 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2381 ipa_edge_args_vector = NULL;
2382 }
2383
2384 /* Frees all dynamically allocated structures that the param info points
2385 to. */
2386
2387 void
2388 ipa_free_node_params_substructures (struct ipa_node_params *info)
2389 {
2390 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2391 free (info->lattices);
2392   /* Lattice values and their sources are deallocated with their allocation
2393 pool. */
2394 VEC_free (tree, heap, info->known_vals);
2395 memset (info, 0, sizeof (*info));
2396 }
2397
2398 /* Free all ipa_node_params structures. */
2399
2400 void
2401 ipa_free_all_node_params (void)
2402 {
2403 int i;
2404 struct ipa_node_params *info;
2405
2406 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2407 ipa_free_node_params_substructures (info);
2408
2409 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2410 ipa_node_params_vector = NULL;
2411 }
2412
2413 /* Hook that is called by cgraph.c when an edge is removed. */
2414
2415 static void
2416 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2417 {
2418   /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
2419 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2420 <= (unsigned)cs->uid)
2421 return;
2422 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2423 }
2424
2425 /* Hook that is called by cgraph.c when a node is removed. */
2426
2427 static void
2428 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2429 {
2430   /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
2431 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2432 <= (unsigned)node->uid)
2433 return;
2434 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2435 }
2436
2437 /* Hook that is called by cgraph.c when an edge is duplicated. */
2438
2439 static void
2440 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2441 __attribute__((unused)) void *data)
2442 {
2443 struct ipa_edge_args *old_args, *new_args;
2444 unsigned int i;
2445
2446 ipa_check_create_edge_args ();
2447
2448 old_args = IPA_EDGE_REF (src);
2449 new_args = IPA_EDGE_REF (dst);
2450
2451 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2452 old_args->jump_functions);
2453
2454 for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
2455 VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
2456 = VEC_copy (ipa_agg_jf_item_t, gc,
2457 VEC_index (ipa_jump_func_t,
2458 old_args->jump_functions, i).agg.items);
2459 }
2460
2461 /* Hook that is called by cgraph.c when a node is duplicated. */
2462
2463 static void
2464 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2465 ATTRIBUTE_UNUSED void *data)
2466 {
2467 struct ipa_node_params *old_info, *new_info;
2468
2469 ipa_check_create_node_params ();
2470 old_info = IPA_NODE_REF (src);
2471 new_info = IPA_NODE_REF (dst);
2472
2473 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2474 old_info->descriptors);
2475 new_info->lattices = NULL;
2476 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2477
2478 new_info->uses_analysis_done = old_info->uses_analysis_done;
2479 new_info->node_enqueued = old_info->node_enqueued;
2480 }
2481
2482
2483 /* Analyze a function newly added to the callgraph.  */
2484
2485 static void
2486 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2487 {
2488 ipa_analyze_node (node);
2489 }
2490
2491 /* Register our cgraph hooks if they are not already there. */
2492
2493 void
2494 ipa_register_cgraph_hooks (void)
2495 {
2496 if (!edge_removal_hook_holder)
2497 edge_removal_hook_holder =
2498 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2499 if (!node_removal_hook_holder)
2500 node_removal_hook_holder =
2501 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2502 if (!edge_duplication_hook_holder)
2503 edge_duplication_hook_holder =
2504 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2505 if (!node_duplication_hook_holder)
2506 node_duplication_hook_holder =
2507 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2508 function_insertion_hook_holder =
2509 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2510 }
2511
2512 /* Unregister all our cgraph hooks and clear the hook holders.  */
2513
2514 static void
2515 ipa_unregister_cgraph_hooks (void)
2516 {
2517 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2518 edge_removal_hook_holder = NULL;
2519 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2520 node_removal_hook_holder = NULL;
2521 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2522 edge_duplication_hook_holder = NULL;
2523 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2524 node_duplication_hook_holder = NULL;
2525 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2526 function_insertion_hook_holder = NULL;
2527 }
2528
2529 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2530 longer needed after ipa-cp. */
2531
2532 void
2533 ipa_free_all_structures_after_ipa_cp (void)
2534 {
2535 if (!optimize)
2536 {
2537 ipa_free_all_edge_args ();
2538 ipa_free_all_node_params ();
2539 free_alloc_pool (ipcp_sources_pool);
2540 free_alloc_pool (ipcp_values_pool);
2541 ipa_unregister_cgraph_hooks ();
2542 }
2543 }
2544
2545 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2546 longer needed after indirect inlining. */
2547
2548 void
2549 ipa_free_all_structures_after_iinln (void)
2550 {
2551 ipa_free_all_edge_args ();
2552 ipa_free_all_node_params ();
2553 ipa_unregister_cgraph_hooks ();
2554 if (ipcp_sources_pool)
2555 free_alloc_pool (ipcp_sources_pool);
2556 if (ipcp_values_pool)
2557 free_alloc_pool (ipcp_values_pool);
2558 }
2559
2560 /* Print the ipa_tree_map data structure (parameter descriptors) of
2561    NODE to F.  */
2562
2563 void
2564 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2565 {
2566 int i, count;
2567 tree temp;
2568 struct ipa_node_params *info;
2569
2570 if (!node->analyzed)
2571 return;
2572 info = IPA_NODE_REF (node);
2573 fprintf (f, " function %s parameter descriptors:\n",
2574 cgraph_node_name (node));
2575 count = ipa_get_param_count (info);
2576 for (i = 0; i < count; i++)
2577 {
2578 temp = ipa_get_param (info, i);
2579 if (TREE_CODE (temp) == PARM_DECL)
2580 fprintf (f, " param %d : %s", i,
2581 (DECL_NAME (temp)
2582 ? (*lang_hooks.decl_printable_name) (temp, 2)
2583 : "(unnamed)"));
2584 if (ipa_is_param_used (info, i))
2585 fprintf (f, " used");
2586 fprintf (f, "\n");
2587 }
2588 }
2589
2590 /* Print ipa_tree_map data structures of all functions in the
2591 callgraph to F. */
2592
2593 void
2594 ipa_print_all_params (FILE * f)
2595 {
2596 struct cgraph_node *node;
2597
2598 fprintf (f, "\nFunction parameters:\n");
2599 FOR_EACH_FUNCTION (node)
2600 ipa_print_node_params (f, node);
2601 }
2602
2603 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2604
2605 VEC(tree, heap) *
2606 ipa_get_vector_of_formal_parms (tree fndecl)
2607 {
2608 VEC(tree, heap) *args;
2609 int count;
2610 tree parm;
2611
2612 count = count_formal_params (fndecl);
2613 args = VEC_alloc (tree, heap, count);
2614 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2615 VEC_quick_push (tree, args, parm);
2616
2617 return args;
2618 }
2619
2620 /* Return a heap allocated vector containing types of formal parameters of
2621 function type FNTYPE. */
2622
2623 static inline VEC(tree, heap) *
2624 get_vector_of_formal_parm_types (tree fntype)
2625 {
2626 VEC(tree, heap) *types;
2627 int count = 0;
2628 tree t;
2629
2630 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2631 count++;
2632
2633 types = VEC_alloc (tree, heap, count);
2634 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2635 VEC_quick_push (tree, types, TREE_VALUE (t));
2636
2637 return types;
2638 }
2639
2640 /* Modify the function declaration FNDECL and its type according to the plan in
2641 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2642 to reflect the actual parameters being modified which are determined by the
2643 base_index field. */
2644
2645 void
2646 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2647 const char *synth_parm_prefix)
2648 {
2649 VEC(tree, heap) *oparms, *otypes;
2650 tree orig_type, new_type = NULL;
2651 tree old_arg_types, t, new_arg_types = NULL;
2652 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2653 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2654 tree new_reversed = NULL;
2655 bool care_for_types, last_parm_void;
2656
2657 if (!synth_parm_prefix)
2658 synth_parm_prefix = "SYNTH";
2659
2660 oparms = ipa_get_vector_of_formal_parms (fndecl);
2661 orig_type = TREE_TYPE (fndecl);
2662 old_arg_types = TYPE_ARG_TYPES (orig_type);
2663
2664   /* The following test is an ugly hack: some functions simply don't have any
2665      arguments in their type.  This is probably a bug, but well... */
2666 care_for_types = (old_arg_types != NULL_TREE);
2667 if (care_for_types)
2668 {
2669 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2670 == void_type_node);
2671 otypes = get_vector_of_formal_parm_types (orig_type);
2672 if (last_parm_void)
2673 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2674 else
2675 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2676 }
2677 else
2678 {
2679 last_parm_void = false;
2680 otypes = NULL;
2681 }
2682
2683 for (i = 0; i < len; i++)
2684 {
2685 struct ipa_parm_adjustment *adj;
2686 gcc_assert (link);
2687
2688 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2689 parm = VEC_index (tree, oparms, adj->base_index);
2690 adj->base = parm;
2691
2692 if (adj->copy_param)
2693 {
2694 if (care_for_types)
2695 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2696 adj->base_index),
2697 new_arg_types);
2698 *link = parm;
2699 link = &DECL_CHAIN (parm);
2700 }
2701 else if (!adj->remove_param)
2702 {
2703 tree new_parm;
2704 tree ptype;
2705
2706 if (adj->by_ref)
2707 ptype = build_pointer_type (adj->type);
2708 else
2709 ptype = adj->type;
2710
2711 if (care_for_types)
2712 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2713
2714 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2715 ptype);
2716 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2717
2718 DECL_ARTIFICIAL (new_parm) = 1;
2719 DECL_ARG_TYPE (new_parm) = ptype;
2720 DECL_CONTEXT (new_parm) = fndecl;
2721 TREE_USED (new_parm) = 1;
2722 DECL_IGNORED_P (new_parm) = 1;
2723 layout_decl (new_parm, 0);
2724
2725 adj->base = parm;
2726 adj->reduction = new_parm;
2727
2728 *link = new_parm;
2729
2730 link = &DECL_CHAIN (new_parm);
2731 }
2732 }
2733
2734 *link = NULL_TREE;
2735
2736 if (care_for_types)
2737 {
2738 new_reversed = nreverse (new_arg_types);
2739 if (last_parm_void)
2740 {
2741 if (new_reversed)
2742 TREE_CHAIN (new_arg_types) = void_list_node;
2743 else
2744 new_reversed = void_list_node;
2745 }
2746 }
2747
2748   /* Use build_distinct_type_copy to preserve as much as possible from the
2749      original type (debug info, attribute lists etc.).
2750      The exception is METHOD_TYPEs, which must have a THIS argument;
2751      when we are asked to remove it, we need to build a new FUNCTION_TYPE
2752      instead.  */
2753 if (TREE_CODE (orig_type) != METHOD_TYPE
2754 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
2755 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
2756 {
2757 new_type = build_distinct_type_copy (orig_type);
2758 TYPE_ARG_TYPES (new_type) = new_reversed;
2759 }
2760 else
2761 {
2762 new_type
2763 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2764 new_reversed));
2765 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2766 DECL_VINDEX (fndecl) = NULL_TREE;
2767 }
2768
2769 /* When signature changes, we need to clear builtin info. */
2770 if (DECL_BUILT_IN (fndecl))
2771 {
2772 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2773 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2774 }
2775
2776 /* This is a new type, not a copy of an old type. Need to reassociate
2777 variants. We can handle everything except the main variant lazily. */
2778 t = TYPE_MAIN_VARIANT (orig_type);
2779 if (orig_type != t)
2780 {
2781 TYPE_MAIN_VARIANT (new_type) = t;
2782 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2783 TYPE_NEXT_VARIANT (t) = new_type;
2784 }
2785 else
2786 {
2787 TYPE_MAIN_VARIANT (new_type) = new_type;
2788 TYPE_NEXT_VARIANT (new_type) = NULL;
2789 }
2790
2791 TREE_TYPE (fndecl) = new_type;
2792 DECL_VIRTUAL_P (fndecl) = 0;
2793 if (otypes)
2794 VEC_free (tree, heap, otypes);
2795 VEC_free (tree, heap, oparms);
2796 }
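
/* A hypothetical adjustment plan, to make the mechanics above concrete:
   given

     int f (struct S *p, int unused);

   where only an int field at offset 0 of *P is read, a single-entry vector
   with base_index 0, offset 0, type int and neither copy_param nor by_ref
   set rewrites the declaration to roughly

     int f (int SYNTH_1);

   The second parameter disappears because no adjustment refers to
   base_index 1.  */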
2797
2798 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2799 If this is a directly recursive call, CS must be NULL. Otherwise it must
2800 contain the corresponding call graph edge. */
2801
2802 void
2803 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2804 ipa_parm_adjustment_vec adjustments)
2805 {
2806 VEC(tree, heap) *vargs;
2807 VEC(tree, gc) **debug_args = NULL;
2808 gimple new_stmt;
2809 gimple_stmt_iterator gsi;
2810 tree callee_decl;
2811 int i, len;
2812
2813 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2814 vargs = VEC_alloc (tree, heap, len);
2815 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2816
2817 gsi = gsi_for_stmt (stmt);
2818 for (i = 0; i < len; i++)
2819 {
2820 struct ipa_parm_adjustment *adj;
2821
2822 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2823
2824 if (adj->copy_param)
2825 {
2826 tree arg = gimple_call_arg (stmt, adj->base_index);
2827
2828 VEC_quick_push (tree, vargs, arg);
2829 }
2830 else if (!adj->remove_param)
2831 {
2832 tree expr, base, off;
2833 location_t loc;
2834
2835 	  /* Since we create a new parameter out of the value of the old one, we
2836 	     can do the following kinds of transformations:
2837
2838 - A scalar passed by reference is converted to a scalar passed by
2839 value. (adj->by_ref is false and the type of the original
2840 actual argument is a pointer to a scalar).
2841
2842 - A part of an aggregate is passed instead of the whole aggregate.
2843 The part can be passed either by value or by reference, this is
2844 determined by value of adj->by_ref. Moreover, the code below
2845 handles both situations when the original aggregate is passed by
2846 value (its type is not a pointer) and when it is passed by
2847 reference (it is a pointer to an aggregate).
2848
2849 When the new argument is passed by reference (adj->by_ref is true)
2850 it must be a part of an aggregate and therefore we form it by
2851 simply taking the address of a reference inside the original
2852 aggregate. */
2853
2854 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2855 base = gimple_call_arg (stmt, adj->base_index);
2856 loc = EXPR_LOCATION (base);
2857
2858 if (TREE_CODE (base) != ADDR_EXPR
2859 && POINTER_TYPE_P (TREE_TYPE (base)))
2860 off = build_int_cst (adj->alias_ptr_type,
2861 adj->offset / BITS_PER_UNIT);
2862 else
2863 {
2864 HOST_WIDE_INT base_offset;
2865 tree prev_base;
2866
2867 if (TREE_CODE (base) == ADDR_EXPR)
2868 base = TREE_OPERAND (base, 0);
2869 prev_base = base;
2870 base = get_addr_base_and_unit_offset (base, &base_offset);
2871 /* Aggregate arguments can have non-invariant addresses. */
2872 if (!base)
2873 {
2874 base = build_fold_addr_expr (prev_base);
2875 off = build_int_cst (adj->alias_ptr_type,
2876 adj->offset / BITS_PER_UNIT);
2877 }
2878 else if (TREE_CODE (base) == MEM_REF)
2879 {
2880 off = build_int_cst (adj->alias_ptr_type,
2881 base_offset
2882 + adj->offset / BITS_PER_UNIT);
2883 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2884 off);
2885 base = TREE_OPERAND (base, 0);
2886 }
2887 else
2888 {
2889 off = build_int_cst (adj->alias_ptr_type,
2890 base_offset
2891 + adj->offset / BITS_PER_UNIT);
2892 base = build_fold_addr_expr (base);
2893 }
2894 }
2895
2896 if (!adj->by_ref)
2897 {
2898 tree type = adj->type;
2899 unsigned int align;
2900 unsigned HOST_WIDE_INT misalign;
2901
2902 get_pointer_alignment_1 (base, &align, &misalign);
2903 misalign += (tree_to_double_int (off)
2904 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2905 * BITS_PER_UNIT);
2906 misalign = misalign & (align - 1);
2907 if (misalign != 0)
2908 align = (misalign & -misalign);
2909 if (align < TYPE_ALIGN (type))
2910 type = build_aligned_type (type, align);
2911 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2912 }
2913 else
2914 {
2915 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2916 expr = build_fold_addr_expr (expr);
2917 }
2918
2919 expr = force_gimple_operand_gsi (&gsi, expr,
2920 adj->by_ref
2921 || is_gimple_reg_type (adj->type),
2922 NULL, true, GSI_SAME_STMT);
2923 VEC_quick_push (tree, vargs, expr);
2924 }
2925 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2926 {
2927 unsigned int ix;
2928 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2929 gimple def_temp;
2930
2931 arg = gimple_call_arg (stmt, adj->base_index);
2932 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2933 {
2934 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2935 continue;
2936 arg = fold_convert_loc (gimple_location (stmt),
2937 TREE_TYPE (origin), arg);
2938 }
2939 if (debug_args == NULL)
2940 debug_args = decl_debug_args_insert (callee_decl);
2941 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2942 if (ddecl == origin)
2943 {
2944 ddecl = VEC_index (tree, *debug_args, ix + 1);
2945 break;
2946 }
2947 if (ddecl == NULL)
2948 {
2949 ddecl = make_node (DEBUG_EXPR_DECL);
2950 DECL_ARTIFICIAL (ddecl) = 1;
2951 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2952 DECL_MODE (ddecl) = DECL_MODE (origin);
2953
2954 VEC_safe_push (tree, gc, *debug_args, origin);
2955 VEC_safe_push (tree, gc, *debug_args, ddecl);
2956 }
2957 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2958 stmt);
2959 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2960 }
2961 }
2962
2963 if (dump_file && (dump_flags & TDF_DETAILS))
2964 {
2965 fprintf (dump_file, "replacing stmt:");
2966 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2967 }
2968
2969 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2970 VEC_free (tree, heap, vargs);
2971 if (gimple_call_lhs (stmt))
2972 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2973
2974 gimple_set_block (new_stmt, gimple_block (stmt));
2975 if (gimple_has_location (stmt))
2976 gimple_set_location (new_stmt, gimple_location (stmt));
2977 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2978 gimple_call_copy_flags (new_stmt, stmt);
2979
2980 if (dump_file && (dump_flags & TDF_DETAILS))
2981 {
2982 fprintf (dump_file, "with stmt:");
2983 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2984 fprintf (dump_file, "\n");
2985 }
2986 gsi_replace (&gsi, new_stmt, true);
2987 if (cs)
2988 cgraph_set_call_stmt (cs, new_stmt);
2989 update_ssa (TODO_update_ssa);
2990 free_dominance_info (CDI_DOMINATORS);
2991 }
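
/* Continuing the hypothetical plan sketched after
   ipa_modify_formal_parameters: a call

     f (&s, 0);

   is rewritten above into roughly

     _1 = s.field;    <- gimplified from the MEM_REF built from base and off
     f (_1);

   with a debug bind inserted so the value of the removed parameter stays
   visible to the debugger.  */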
2992
2993 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2994
2995 static bool
2996 index_in_adjustments_multiple_times_p (int base_index,
2997 ipa_parm_adjustment_vec adjustments)
2998 {
2999 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3000 bool one = false;
3001
3002 for (i = 0; i < len; i++)
3003 {
3004 struct ipa_parm_adjustment *adj;
3005 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3006
3007 if (adj->base_index == base_index)
3008 {
3009 if (one)
3010 return true;
3011 else
3012 one = true;
3013 }
3014 }
3015 return false;
3016 }
3017
3018
3019 /* Return adjustments that should have the same effect on function parameters
3020 and call arguments as if they were first changed according to adjustments in
3021 INNER and then by adjustments in OUTER. */
3022
3023 ipa_parm_adjustment_vec
3024 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3025 ipa_parm_adjustment_vec outer)
3026 {
3027 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
3028 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
3029 int removals = 0;
3030 ipa_parm_adjustment_vec adjustments, tmp;
3031
3032 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
3033 for (i = 0; i < inlen; i++)
3034 {
3035 struct ipa_parm_adjustment *n;
3036 n = &VEC_index (ipa_parm_adjustment_t, inner, i);
3037
3038 if (n->remove_param)
3039 removals++;
3040 else
3041 VEC_quick_push (ipa_parm_adjustment_t, tmp, *n);
3042 }
3043
3044 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
3045 for (i = 0; i < outlen; i++)
3046 {
3047 struct ipa_parm_adjustment r;
3048 struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
3049 outer, i);
3050 struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
3051 out->base_index);
3052
3053 memset (&r, 0, sizeof (r));
3054 gcc_assert (!in->remove_param);
3055 if (out->remove_param)
3056 {
3057 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3058 {
3059 r.remove_param = true;
3060 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3061 }
3062 continue;
3063 }
3064
3065 r.base_index = in->base_index;
3066 r.type = out->type;
3067
3068 /* FIXME: Create nonlocal value too. */
3069
3070 if (in->copy_param && out->copy_param)
3071 r.copy_param = true;
3072 else if (in->copy_param)
3073 r.offset = out->offset;
3074 else if (out->copy_param)
3075 r.offset = in->offset;
3076 else
3077 r.offset = in->offset + out->offset;
3078 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3079 }
3080
3081 for (i = 0; i < inlen; i++)
3082 {
3083 struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
3084 inner, i);
3085
3086 if (n->remove_param)
3087 VEC_quick_push (ipa_parm_adjustment_t, adjustments, *n);
3088 }
3089
3090 VEC_free (ipa_parm_adjustment_t, heap, tmp);
3091 return adjustments;
3092 }
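
/* A worked example with made-up indices: let INNER map the original
   parameters as (0 -> copy, 1 -> component at bit offset 32) and let OUTER
   keep only its input 1 as a component at offset 8.  OUTER's entry refers
   to INNER's surviving result 1, so the combined entry gets base_index 1
   and offset 32 + 8 = 40 relative to the original parameter; when either
   stage is a plain copy, the other stage's offset is used unchanged.  */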
3093
3094 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3095 friendly way, assuming they are meant to be applied to FNDECL. */
3096
3097 void
3098 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3099 tree fndecl)
3100 {
3101 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3102 bool first = true;
3103 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
3104
3105 fprintf (file, "IPA param adjustments: ");
3106 for (i = 0; i < len; i++)
3107 {
3108 struct ipa_parm_adjustment *adj;
3109 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3110
3111 if (!first)
3112 fprintf (file, " ");
3113 else
3114 first = false;
3115
3116 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3117 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
3118 if (adj->base)
3119 {
3120 fprintf (file, ", base: ");
3121 print_generic_expr (file, adj->base, 0);
3122 }
3123 if (adj->reduction)
3124 {
3125 fprintf (file, ", reduction: ");
3126 print_generic_expr (file, adj->reduction, 0);
3127 }
3128 if (adj->new_ssa_base)
3129 {
3130 fprintf (file, ", new_ssa_base: ");
3131 print_generic_expr (file, adj->new_ssa_base, 0);
3132 }
3133
3134 if (adj->copy_param)
3135 fprintf (file, ", copy_param");
3136 else if (adj->remove_param)
3137 fprintf (file, ", remove_param");
3138 else
3139 fprintf (file, ", offset %li", (long) adj->offset);
3140 if (adj->by_ref)
3141 fprintf (file, ", by_ref");
3142 print_node_brief (file, ", type: ", adj->type, 0);
3143 fprintf (file, "\n");
3144 }
3145 VEC_free (tree, heap, parms);
3146 }
3147
3148 /* Stream out jump function JUMP_FUNC to OB. */
3149
3150 static void
3151 ipa_write_jump_function (struct output_block *ob,
3152 struct ipa_jump_func *jump_func)
3153 {
3154 struct ipa_agg_jf_item *item;
3155 struct bitpack_d bp;
3156 int i, count;
3157
3158 streamer_write_uhwi (ob, jump_func->type);
3159 switch (jump_func->type)
3160 {
3161 case IPA_JF_UNKNOWN:
3162 break;
3163 case IPA_JF_KNOWN_TYPE:
3164 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3165 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3166 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3167 break;
3168 case IPA_JF_CONST:
3169 gcc_assert (
3170 IS_UNKNOWN_LOCATION (EXPR_LOCATION (jump_func->value.constant)));
3171 stream_write_tree (ob, jump_func->value.constant, true);
3172 break;
3173 case IPA_JF_PASS_THROUGH:
3174 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3175 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3176 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3177 bp = bitpack_create (ob->main_stream);
3178 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3179 streamer_write_bitpack (&bp);
3180 break;
3181 case IPA_JF_ANCESTOR:
3182 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3183 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3184 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3185 bp = bitpack_create (ob->main_stream);
3186 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3187 streamer_write_bitpack (&bp);
3188 break;
3189 }
3190
3191 count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
3192 streamer_write_uhwi (ob, count);
3193 if (count)
3194 {
3195 bp = bitpack_create (ob->main_stream);
3196 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3197 streamer_write_bitpack (&bp);
3198 }
3199
3200 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
3201 {
3202 streamer_write_uhwi (ob, item->offset);
3203 stream_write_tree (ob, item->value, true);
3204 }
3205 }
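
/* Informal sketch of the record emitted above (a reading aid, not a
   specification):

     uhwi     jump function type
     ...      type-specific payload, see the switch above
     uhwi     number of aggregate items
     bitpack  by_ref flag (present only when there are items)
     { uhwi offset; tree value; }   once per aggregate item

   ipa_read_jump_function below must mirror this layout exactly.  */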
3206
3207 /* Read in jump function JUMP_FUNC from IB. */
3208
3209 static void
3210 ipa_read_jump_function (struct lto_input_block *ib,
3211 struct ipa_jump_func *jump_func,
3212 struct data_in *data_in)
3213 {
3214 struct bitpack_d bp;
3215 int i, count;
3216
3217 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3218 switch (jump_func->type)
3219 {
3220 case IPA_JF_UNKNOWN:
3221 break;
3222 case IPA_JF_KNOWN_TYPE:
3223 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3224 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3225 jump_func->value.known_type.component_type = stream_read_tree (ib,
3226 data_in);
3227 break;
3228 case IPA_JF_CONST:
3229 jump_func->value.constant = stream_read_tree (ib, data_in);
3230 break;
3231 case IPA_JF_PASS_THROUGH:
3232 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3233 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3234 jump_func->value.pass_through.operation
3235 = (enum tree_code) streamer_read_uhwi (ib);
3236 bp = streamer_read_bitpack (ib);
3237 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3238 break;
3239 case IPA_JF_ANCESTOR:
3240 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3241 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3242 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3243 bp = streamer_read_bitpack (ib);
3244 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3245 break;
3246 }
3247
3248 count = streamer_read_uhwi (ib);
3249 jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
3250 if (count)
3251 {
3252 bp = streamer_read_bitpack (ib);
3253 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3254 }
3255 for (i = 0; i < count; i++)
3256 {
3257 struct ipa_agg_jf_item item;
3258 item.offset = streamer_read_uhwi (ib);
3259 item.value = stream_read_tree (ib, data_in);
3260 VEC_quick_push (ipa_agg_jf_item_t, jump_func->agg.items, item);
3261 }
3262 }
3263
3264 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3265 relevant to indirect inlining to OB. */
3266
3267 static void
3268 ipa_write_indirect_edge_info (struct output_block *ob,
3269 struct cgraph_edge *cs)
3270 {
3271 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3272 struct bitpack_d bp;
3273
3274 streamer_write_hwi (ob, ii->param_index);
3275 streamer_write_hwi (ob, ii->offset);
3276 bp = bitpack_create (ob->main_stream);
3277 bp_pack_value (&bp, ii->polymorphic, 1);
3278 bp_pack_value (&bp, ii->agg_contents, 1);
3279 bp_pack_value (&bp, ii->by_ref, 1);
3280 streamer_write_bitpack (&bp);
3281
3282 if (ii->polymorphic)
3283 {
3284 streamer_write_hwi (ob, ii->otr_token);
3285 stream_write_tree (ob, ii->otr_type, true);
3286 }
3287 }
3288
3289 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3290 relevant to indirect inlining from IB. */
3291
3292 static void
3293 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3294 struct data_in *data_in ATTRIBUTE_UNUSED,
3295 struct cgraph_edge *cs)
3296 {
3297 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3298 struct bitpack_d bp;
3299
3300 ii->param_index = (int) streamer_read_hwi (ib);
3301 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3302 bp = streamer_read_bitpack (ib);
3303 ii->polymorphic = bp_unpack_value (&bp, 1);
3304 ii->agg_contents = bp_unpack_value (&bp, 1);
3305 ii->by_ref = bp_unpack_value (&bp, 1);
3306 if (ii->polymorphic)
3307 {
3308 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3309 ii->otr_type = stream_read_tree (ib, data_in);
3310 }
3311 }
3312
3313 /* Stream out NODE info to OB. */
3314
3315 static void
3316 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3317 {
3318 int node_ref;
3319 lto_symtab_encoder_t encoder;
3320 struct ipa_node_params *info = IPA_NODE_REF (node);
3321 int j;
3322 struct cgraph_edge *e;
3323 struct bitpack_d bp;
3324
3325 encoder = ob->decl_state->symtab_node_encoder;
3326 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3327 streamer_write_uhwi (ob, node_ref);
3328
3329 bp = bitpack_create (ob->main_stream);
3330 gcc_assert (info->uses_analysis_done
3331 || ipa_get_param_count (info) == 0);
3332 gcc_assert (!info->node_enqueued);
3333 gcc_assert (!info->ipcp_orig_node);
3334 for (j = 0; j < ipa_get_param_count (info); j++)
3335 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3336 streamer_write_bitpack (&bp);
3337 for (e = node->callees; e; e = e->next_callee)
3338 {
3339 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3340
3341 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3342 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3343 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3344 }
3345 for (e = node->indirect_calls; e; e = e->next_callee)
3346 {
3347 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3348
3349 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3350 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3351 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3352 ipa_write_indirect_edge_info (ob, e);
3353 }
3354 }
3355
3356 /* Stream in NODE info from IB. */
3357
3358 static void
3359 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3360 struct data_in *data_in)
3361 {
3362 struct ipa_node_params *info = IPA_NODE_REF (node);
3363 int k;
3364 struct cgraph_edge *e;
3365 struct bitpack_d bp;
3366
3367 ipa_initialize_node_params (node);
3368
3369 bp = streamer_read_bitpack (ib);
3370 if (ipa_get_param_count (info) != 0)
3371 info->uses_analysis_done = true;
3372 info->node_enqueued = false;
3373 for (k = 0; k < ipa_get_param_count (info); k++)
3374 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3375 for (e = node->callees; e; e = e->next_callee)
3376 {
3377 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3378 int count = streamer_read_uhwi (ib);
3379
3380 if (!count)
3381 continue;
3382 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
3383
3384 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3385 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3386 }
3387 for (e = node->indirect_calls; e; e = e->next_callee)
3388 {
3389 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3390 int count = streamer_read_uhwi (ib);
3391
3392 if (count)
3393 {
3394 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
3395 count);
3396 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3397 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3398 data_in);
3399 }
3400 ipa_read_indirect_edge_info (ib, data_in, e);
3401 }
3402 }
3403
3404 /* Write jump functions of all functions in the current partition.  */
3405
3406 void
3407 ipa_prop_write_jump_functions (void)
3408 {
3409 struct cgraph_node *node;
3410 struct output_block *ob;
3411 unsigned int count = 0;
3412 lto_symtab_encoder_iterator lsei;
3413 lto_symtab_encoder_t encoder;
3414
3415
3416 if (!ipa_node_params_vector)
3417 return;
3418
3419 ob = create_output_block (LTO_section_jump_functions);
3420 encoder = ob->decl_state->symtab_node_encoder;
3421 ob->cgraph_node = NULL;
3422 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3423 lsei_next_function_in_partition (&lsei))
3424 {
3425 node = lsei_cgraph_node (lsei);
3426 if (cgraph_function_with_gimple_body_p (node)
3427 && IPA_NODE_REF (node) != NULL)
3428 count++;
3429 }
3430
3431 streamer_write_uhwi (ob, count);
3432
3433 /* Process all of the functions. */
3434 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3435 lsei_next_function_in_partition (&lsei))
3436 {
3437 node = lsei_cgraph_node (lsei);
3438 if (cgraph_function_with_gimple_body_p (node)
3439 && IPA_NODE_REF (node) != NULL)
3440 ipa_write_node_info (ob, node);
3441 }
3442 streamer_write_char_stream (ob->main_stream, 0);
3443 produce_asm (ob, NULL);
3444 destroy_output_block (ob);
3445 }
3446
3447 /* Read section in file FILE_DATA of length LEN with data DATA. */
3448
3449 static void
3450 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3451 size_t len)
3452 {
3453 const struct lto_function_header *header =
3454 (const struct lto_function_header *) data;
3455 const int cfg_offset = sizeof (struct lto_function_header);
3456 const int main_offset = cfg_offset + header->cfg_size;
3457 const int string_offset = main_offset + header->main_size;
3458 struct data_in *data_in;
3459 struct lto_input_block ib_main;
3460 unsigned int i;
3461 unsigned int count;
3462
3463 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3464 header->main_size);
3465
3466 data_in =
3467 lto_data_in_create (file_data, (const char *) data + string_offset,
3468 header->string_size, NULL);
3469 count = streamer_read_uhwi (&ib_main);
3470
3471 for (i = 0; i < count; i++)
3472 {
3473 unsigned int index;
3474 struct cgraph_node *node;
3475 lto_symtab_encoder_t encoder;
3476
3477 index = streamer_read_uhwi (&ib_main);
3478 encoder = file_data->symtab_node_encoder;
3479 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3480 gcc_assert (node->analyzed);
3481 ipa_read_node_info (&ib_main, node, data_in);
3482 }
3483 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3484 len);
3485 lto_data_in_delete (data_in);
3486 }
3487
3488 /* Read in IPA-CP jump functions from all LTO files.  */
3489
3490 void
3491 ipa_prop_read_jump_functions (void)
3492 {
3493 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3494 struct lto_file_decl_data *file_data;
3495 unsigned int j = 0;
3496
3497 ipa_check_create_node_params ();
3498 ipa_check_create_edge_args ();
3499 ipa_register_cgraph_hooks ();
3500
3501 while ((file_data = file_data_vec[j++]))
3502 {
3503 size_t len;
3504 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3505
3506 if (data)
3507 ipa_prop_read_section (file_data, data, len);
3508 }
3509 }
3510
3511 /* After merging units, argument counts may no longer match and decl
3512    merging might have rendered parameter lists obsolete, so re-initialize
3513    the parameter information of every analyzed function.  */
3514
3515 void
3516 ipa_update_after_lto_read (void)
3517 {
3518 struct cgraph_node *node;
3519
3520 ipa_check_create_node_params ();
3521 ipa_check_create_edge_args ();
3522
3523 FOR_EACH_DEFINED_FUNCTION (node)
3524 if (node->analyzed)
3525 ipa_initialize_node_params (node);
3526 }