params.def (PARAM_IPA_MAX_AGG_ITEMS): New parameter.
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
41
42
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
45
46 struct param_analysis_info
47 {
48 bool parm_modified, ref_modified, pt_modified;
49 bitmap parm_visited_statements, pt_visited_statements;
50 };
51
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the edge argument infos are actually stored.  */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
56
57 /* Holders of ipa cgraph hooks: */
58 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
59 static struct cgraph_node_hook_list *node_removal_hook_holder;
60 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
61 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
62 static struct cgraph_node_hook_list *function_insertion_hook_holder;
63
64 /* Return index of the formal whose tree is PTREE in function which corresponds
65 to INFO. */
66
67 int
68 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
69 {
70 int i, count;
71
72 count = ipa_get_param_count (info);
73 for (i = 0; i < count; i++)
74 if (ipa_get_param (info, i) == ptree)
75 return i;
76
77 return -1;
78 }
79
80 /* Populate the param_decl field in parameter descriptors of INFO that
81 corresponds to NODE. */
82
83 static void
84 ipa_populate_param_decls (struct cgraph_node *node,
85 struct ipa_node_params *info)
86 {
87 tree fndecl;
88 tree fnargs;
89 tree parm;
90 int param_num;
91
92 fndecl = node->symbol.decl;
93 fnargs = DECL_ARGUMENTS (fndecl);
94 param_num = 0;
95 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
96 {
97 VEC_index (ipa_param_descriptor_t,
98 info->descriptors, param_num).decl = parm;
99 param_num++;
100 }
101 }
102
103 /* Return how many formal parameters FNDECL has. */
104
105 static inline int
106 count_formal_params (tree fndecl)
107 {
108 tree parm;
109 int count = 0;
110
111 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
112 count++;
113
114 return count;
115 }
116
117 /* Initialize the ipa_node_params structure associated with NODE by counting
118 the function parameters, creating the descriptors and populating their
119 param_decls. */
120
121 void
122 ipa_initialize_node_params (struct cgraph_node *node)
123 {
124 struct ipa_node_params *info = IPA_NODE_REF (node);
125
126 if (!info->descriptors)
127 {
128 int param_count;
129
130 param_count = count_formal_params (node->symbol.decl);
131 if (param_count)
132 {
133 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
134 info->descriptors, param_count);
135 ipa_populate_param_decls (node, info);
136 }
137 }
138 }
139
/* Print the jump functions associated with call graph edge CS to file F,
   one line (or group of lines) per actual argument: the scalar jump function
   first, then any aggregate-contents jump functions attached to it.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  /* For an address of a CONST_DECL, also dump the constant value it
	     holds, which is usually the interesting part.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   tree_code_name[(int)
				  jump_func->value.pass_through.operation]);
	  /* A NOP_EXPR pass-through has no second operand to print.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      /* Aggregate jump functions are attached to the scalar one above and
	 describe known contents of the passed aggregate, item by item.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
			    j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      /* A TYPE_P value marks a clobbered region rather than a known
		 constant; only its size is meaningful.  */
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_low_cst (TYPE_SIZE (item->value), 1));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}
236
237
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F, covering both direct callees and indirect call sites.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  int i;

  fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      /* NOTE(review): xstrdup presumably guards against cgraph_node_name
	 returning a shared buffer that the second call would overwrite; the
	 copies are never freed, which looks intentional in this dump-only
	 path — confirm before changing.  */
      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup (cgraph_node_name (node)), node->uid,
	       xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      if (cs->call_stmt)
	{
	  fprintf (f, " indirect callsite %d for stmt ", i);
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, " indirect callsite %d :\n", i);
      ipa_print_node_jump_functions_for_edge (f, cs);

    }
}
275
276 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
277
278 void
279 ipa_print_all_jump_functions (FILE *f)
280 {
281 struct cgraph_node *node;
282
283 fprintf (f, "\nJump functions:\n");
284 FOR_EACH_FUNCTION (node)
285 {
286 ipa_print_node_jump_functions (f, node);
287 }
288 }
289
290 /* Set JFUNC to be a known type jump function. */
291
292 static void
293 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
294 tree base_type, tree component_type)
295 {
296 jfunc->type = IPA_JF_KNOWN_TYPE;
297 jfunc->value.known_type.offset = offset,
298 jfunc->value.known_type.base_type = base_type;
299 jfunc->value.known_type.component_type = component_type;
300 }
301
302 /* Set JFUNC to be a constant jmp function. */
303
304 static void
305 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
306 {
307 jfunc->type = IPA_JF_CONST;
308 jfunc->value.constant = constant;
309 }
310
311 /* Set JFUNC to be a simple pass-through jump function. */
312 static void
313 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
314 bool agg_preserved)
315 {
316 jfunc->type = IPA_JF_PASS_THROUGH;
317 jfunc->value.pass_through.operand = NULL_TREE;
318 jfunc->value.pass_through.formal_id = formal_id;
319 jfunc->value.pass_through.operation = NOP_EXPR;
320 jfunc->value.pass_through.agg_preserved = agg_preserved;
321 }
322
323 /* Set JFUNC to be an arithmetic pass through jump function. */
324
325 static void
326 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
327 tree operand, enum tree_code operation)
328 {
329 jfunc->type = IPA_JF_PASS_THROUGH;
330 jfunc->value.pass_through.operand = operand;
331 jfunc->value.pass_through.formal_id = formal_id;
332 jfunc->value.pass_through.operation = operation;
333 jfunc->value.pass_through.agg_preserved = false;
334 }
335
336 /* Set JFUNC to be an ancestor jump function. */
337
338 static void
339 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
340 tree type, int formal_id, bool agg_preserved)
341 {
342 jfunc->type = IPA_JF_ANCESTOR;
343 jfunc->value.ancestor.formal_id = formal_id;
344 jfunc->value.ancestor.offset = offset;
345 jfunc->value.ancestor.type = type;
346 jfunc->value.ancestor.agg_preserved = agg_preserved;
347 }
348
/* Structure passed in between detect_type_change and
   check_stmt_for_type_change, accumulating what the walk over virtual
   definitions has learned about the dynamic type of one object.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
369
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  /* Per the argument above, calls never change the dynamic type from the
     caller's perspective during the backward walk.  */
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Scalar stores can only be VMT pointer stores if the lhs is a
	 pointer (under strict aliasing) and, for component references, a
	 field marked DECL_VIRTUAL_P.  Aggregate stores fall through to the
	 conservative "maybe" answer.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservative default: anything we cannot rule out may store a VMT
     pointer.  */
  return true;
}
432
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI (its base and the offset of its VMT
   pointer) and the type associated with the new table can be identified,
   return that type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The store must assign the address of a virtual table variable to a
     DECL_VIRTUAL_P field.  */
  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  /* The store must be exactly pointer-sized and at the offset of the VMT
     pointer we are tracking.  */
  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  /* The base must be the very object TCI describes; for MEM_REFs compare
     the pointer and the constant offset structurally.  */
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  /* The context of a virtual table declaration is the type it belongs to.  */
  return DECL_CONTEXT (rhs);
}
476
477 /* Callback of walk_aliased_vdefs and a helper function for
478 detect_type_change to check whether a particular statement may modify
479 the virtual table pointer, and if possible also determine the new type of
480 the (sub-)object. It stores its result into DATA, which points to a
481 type_change_info structure. */
482
483 static bool
484 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
485 {
486 gimple stmt = SSA_NAME_DEF_STMT (vdef);
487 struct type_change_info *tci = (struct type_change_info *) data;
488
489 if (stmt_may_be_vtbl_ptr_store (stmt))
490 {
491 tree type;
492 type = extr_type_from_vtbl_ptr_store (stmt, tci);
493 if (tci->type_maybe_changed
494 && type != tci->known_current_type)
495 tci->multiple_types_encountered = true;
496 tci->known_current_type = type;
497 tci->type_maybe_changed = true;
498 return true;
499 }
500 else
501 return false;
502 }
503


/* Like detect_type_change but with extra argument COMP_TYPE which will become
   the component type part of new JFUNC if dynamic type change is detected and
   the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
		      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  /* Restrict the alias walk to the pointer-sized slot holding the VMT
     pointer at OFFSET within BASE.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  /* Walk virtual definitions backwards from the call looking for VMT
     pointer stores; check_stmt_for_type_change fills in TCI.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  /* Only a single unambiguous new type at offset zero yields a usable
     KNOWN_TYPE jump function; otherwise degrade to UNKNOWN.  */
  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
551
552 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
553 looking for assignments to its virtual table pointer. If it is, return true
554 and fill in the jump function JFUNC with relevant type information or set it
555 to unknown. ARG is the object itself (not a pointer to it, unless
556 dereferenced). BASE is the base of the memory access as returned by
557 get_ref_base_and_extent, as is the offset. */
558
559 static bool
560 detect_type_change (tree arg, tree base, gimple call,
561 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
562 {
563 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
564 }
565
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  /* Only pointers to RECORD_TYPEs can carry a virtual table pointer.  */
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  /* Build a zero-offset dereference of ARG to serve as both the reference
     and its base in the common helper.  */
  comp_type = TREE_TYPE (TREE_TYPE (arg));
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}
587
588 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
589 boolean variable pointed to by DATA. */
590
591 static bool
592 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
593 void *data)
594 {
595 bool *b = (bool *) data;
596 *b = true;
597 return true;
598 }
599
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter; it may be NULL, in which case nothing is
   cached.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
			      gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  /* A previously detected modification is cached and final.  */
  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
630
631 /* If STMT is an assignment that loads a value from an parameter declaration,
632 return the index of the parameter in ipa_node_params which has not been
633 modified. Otherwise return -1. */
634
635 static int
636 load_from_unmodified_param (struct ipa_node_params *info,
637 struct param_analysis_info *parms_ainfo,
638 gimple stmt)
639 {
640 int index;
641 tree op1;
642
643 if (!gimple_assign_single_p (stmt))
644 return -1;
645
646 op1 = gimple_assign_rhs1 (stmt);
647 if (TREE_CODE (op1) != PARM_DECL)
648 return -1;
649
650 index = ipa_get_param_decl_index (info, op1);
651 if (index < 0
652 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
653 : NULL, stmt, op1))
654 return -1;
655
656 return index;
657 }
658
/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   the parameter.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
			   gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  /* A previously detected modification is cached and final.  */
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}
682
683 /* Return true if the data pointed to by PARM is known to be unmodified in this
684 function before reaching call statement CALL into which it is passed.
685 PARM_AINFO is a pointer to a structure containing temporary information
686 about PARM. */
687
688 static bool
689 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
690 gimple call, tree parm)
691 {
692 bool modified = false;
693 ao_ref refd;
694
695 /* It's unnecessary to calculate anything about memory contnets for a const
696 function because it is not goin to use it. But do not cache the result
697 either. Also, no such calculations for non-pointers. */
698 if (!gimple_vuse (call)
699 || !POINTER_TYPE_P (TREE_TYPE (parm)))
700 return false;
701
702 if (parm_ainfo->pt_modified)
703 return false;
704
705 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
706 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
707 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
708 if (modified)
709 parm_ainfo->pt_modified = true;
710 return !modified;
711 }
712
713 /* Return true if we can prove that OP is a memory reference loading unmodified
714 data from an aggregate passed as a parameter and if the aggregate is passed
715 by reference, that the alias type of the load corresponds to the type of the
716 formal parameter (so that we can rely on this type for TBAA in callers).
717 INFO and PARMS_AINFO describe parameters of the current function (but the
718 latter can be NULL), STMT is the load statement. If function returns true,
719 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
720 within the aggregate and whether it is a load from a value passed by
721 reference respectively. */
722
723 static bool
724 ipa_load_from_parm_agg_1 (struct ipa_node_params *info,
725 struct param_analysis_info *parms_ainfo, gimple stmt,
726 tree op, int *index_p, HOST_WIDE_INT *offset_p,
727 bool *by_ref_p)
728 {
729 int index;
730 HOST_WIDE_INT size, max_size;
731 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
732
733 if (max_size == -1 || max_size != size || *offset_p < 0)
734 return false;
735
736 if (DECL_P (base))
737 {
738 int index = ipa_get_param_decl_index (info, base);
739 if (index >= 0
740 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
741 : NULL, stmt, op))
742 {
743 *index_p = index;
744 *by_ref_p = false;
745 return true;
746 }
747 return false;
748 }
749
750 if (TREE_CODE (base) != MEM_REF
751 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
752 || !integer_zerop (TREE_OPERAND (base, 1)))
753 return false;
754
755 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
756 {
757 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
758 index = ipa_get_param_decl_index (info, parm);
759 }
760 else
761 {
762 /* This branch catches situations where a pointer parameter is not a
763 gimple register, for example:
764
765 void hip7(S*) (struct S * p)
766 {
767 void (*<T2e4>) (struct S *) D.1867;
768 struct S * p.1;
769
770 <bb 2>:
771 p.1_1 = p;
772 D.1867_2 = p.1_1->f;
773 D.1867_2 ();
774 gdp = &p;
775 */
776
777 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
778 index = load_from_unmodified_param (info, parms_ainfo, def);
779 }
780
781 if (index >= 0
782 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
783 stmt, op))
784 {
785 *index_p = index;
786 *by_ref_p = true;
787 return true;
788 }
789 return false;
790 }
791
792 /* Just like the previous function, just without the param_analysis_info
793 pointer, for users outside of this file. */
794
795 bool
796 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
797 tree op, int *index_p, HOST_WIDE_INT *offset_p,
798 bool *by_ref_p)
799 {
800 return ipa_load_from_parm_agg_1 (info, NULL, stmt, op, index_p, offset_p,
801 by_ref_p);
802 }
803
804 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
805 of an assignment statement STMT, try to determine whether we are actually
806 handling any of the following cases and construct an appropriate jump
807 function into JFUNC if so:
808
809 1) The passed value is loaded from a formal parameter which is not a gimple
810 register (most probably because it is addressable, the value has to be
811 scalar) and we can guarantee the value has not changed. This case can
812 therefore be described by a simple pass-through jump function. For example:
813
814 foo (int a)
815 {
816 int a.0;
817
818 a.0_2 = a;
819 bar (a.0_2);
820
821 2) The passed value can be described by a simple arithmetic pass-through
822 jump function. E.g.
823
824 foo (int a)
825 {
826 int D.2064;
827
828 D.2064_4 = a.1(D) + 4;
829 bar (D.2064_4);
830
831 This case can also occur in combination of the previous one, e.g.:
832
833 foo (int a, int z)
834 {
835 int a.0;
836 int D.2064;
837
838 a.0_3 = a;
839 D.2064_4 = a.0_3 + 4;
840 foo (D.2064_4);
841
842 3) The passed value is an address of an object within another one (which
843 also passed by reference). Such situations are described by an ancestor
844 jump function and describe situations such as:
845
846 B::foo() (struct B * const this)
847 {
848 struct A * D.1845;
849
850 D.1845_2 = &this_1(D)->D.1748;
851 A::bar (D.1845_2);
852
853 INFO is the structure describing individual parameters access different
854 stages of IPA optimizations. PARMS_AINFO contains the information that is
855 only needed for intraprocedural analysis. */
856
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* Find which formal parameter, if any, the first operand comes from —
     either directly (default-def SSA name) or via a load from an unmodified
     addressable parameter.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Case 2 of the function comment: arithmetic pass-through.  The
	     second operand must be an IPA invariant and, except for
	     comparisons, the types must match up.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt)
	       && !detect_type_change_ssa (tc_ssa, call, jfunc))
	{
	  /* Case 1: simple pass-through, provided no dynamic type change
	     could have intervened.  */
	  bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
						     call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  /* Case 3: taking the address of a component within a parameter-pointed
     object — candidate for an ancestor jump function.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors, so a
     detected type change makes the ancestor jump function unusable.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, ssa));
}
935
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Add the MEM_REF's own constant offset (in bits) to the component
     offset computed by get_ref_base_and_extent.  */
  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
979


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
   goto <bb 3>;
   else
   goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct param_analysis_info *parms_ainfo,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  /* The pattern has exactly two incoming values: the ancestor address and a
     literal null.  */
  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* The non-null value must be the &obj->component assignment from the
     pattern, in a block dominated solely by the null test.  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  /* The guarding condition must be exactly `parm != 0'.  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both phi predecessors must come straight from the pattern's blocks.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, parm));
}
1062
1063 /* Given OP which is passed as an actual argument to a called function,
1064 determine if it is possible to construct a KNOWN_TYPE jump function for it
1065 and if so, create one and store it to JFUNC. */
1066
static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* Only worthwhile with devirtualization and only for addresses of
     record objects.  */
  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  /* The reference must be based on a non-global declaration with a
     constant, fully-known extent.  */
  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  /* Punt if there is no type information (no binfo) or the dynamic type
     may have changed before the call.  */
  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}
1094
1095 /* Inspect the given TYPE and return true iff it has the same structure (the
1096 same number of fields of the same types) as a C++ member pointer. If
1097 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1098 corresponding fields there. */
1099
1100 static bool
1101 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1102 {
1103 tree fld;
1104
1105 if (TREE_CODE (type) != RECORD_TYPE)
1106 return false;
1107
1108 fld = TYPE_FIELDS (type);
1109 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1110 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1111 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1112 return false;
1113
1114 if (method_ptr)
1115 *method_ptr = fld;
1116
1117 fld = DECL_CHAIN (fld);
1118 if (!fld || INTEGRAL_TYPE_P (fld)
1119 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1120 return false;
1121 if (delta)
1122 *delta = fld;
1123
1124 if (DECL_CHAIN (fld))
1125 return false;
1126
1127 return true;
1128 }
1129
1130 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1131 return the rhs of its defining statement. Otherwise return RHS as it
1132 is. */
1133
1134 static inline tree
1135 get_ssa_def_if_simple_copy (tree rhs)
1136 {
1137 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1138 {
1139 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1140
1141 if (gimple_assign_single_p (def_stmt))
1142 rhs = gimple_assign_rhs1 (def_stmt);
1143 else
1144 break;
1145 }
1146 return rhs;
1147 }
1148
/* Simple linked list, describing known contents of an aggregate before a
   call.  */
1151
struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list; entries are kept sorted by
     increasing offset.  */
  struct ipa_known_agg_contents_list *next;
};
1161
1162 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1163 in ARG is filled in with constant values. ARG can either be an aggregate
1164 expression or a pointer to an aggregate. JFUNC is the jump function into
1165 which the constants are subsequently stored. */
1166
1167 static void
1168 determine_known_aggregate_parts (gimple call, tree arg,
1169 struct ipa_jump_func *jfunc)
1170 {
1171 struct ipa_known_agg_contents_list *list = NULL;
1172 int item_count = 0, const_count = 0;
1173 HOST_WIDE_INT arg_offset, arg_size;
1174 gimple_stmt_iterator gsi;
1175 tree arg_base;
1176 bool check_ref, by_ref;
1177 ao_ref r;
1178
1179 /* The function operates in three stages. First, we prepare check_ref, r,
1180 arg_base and arg_offset based on what is actually passed as an actual
1181 argument. */
1182
1183 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1184 {
1185 by_ref = true;
1186 if (TREE_CODE (arg) == SSA_NAME)
1187 {
1188 tree type_size;
1189 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1190 return;
1191 check_ref = true;
1192 arg_base = arg;
1193 arg_offset = 0;
1194 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1195 arg_size = tree_low_cst (type_size, 1);
1196 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1197 }
1198 else if (TREE_CODE (arg) == ADDR_EXPR)
1199 {
1200 HOST_WIDE_INT arg_max_size;
1201
1202 arg = TREE_OPERAND (arg, 0);
1203 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1204 &arg_max_size);
1205 if (arg_max_size == -1
1206 || arg_max_size != arg_size
1207 || arg_offset < 0)
1208 return;
1209 if (DECL_P (arg_base))
1210 {
1211 tree size;
1212 check_ref = false;
1213 size = build_int_cst (integer_type_node, arg_size);
1214 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1215 }
1216 else
1217 return;
1218 }
1219 else
1220 return;
1221 }
1222 else
1223 {
1224 HOST_WIDE_INT arg_max_size;
1225
1226 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1227
1228 by_ref = false;
1229 check_ref = false;
1230 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1231 &arg_max_size);
1232 if (arg_max_size == -1
1233 || arg_max_size != arg_size
1234 || arg_offset < 0)
1235 return;
1236
1237 ao_ref_init (&r, arg);
1238 }
1239
1240 /* Second stage walks back the BB, looks at individual statements and as long
1241 as it is confident of how the statements affect contents of the
1242 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1243 describing it. */
1244 gsi = gsi_for_stmt (call);
1245 gsi_prev (&gsi);
1246 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1247 {
1248 struct ipa_known_agg_contents_list *n, **p;
1249 gimple stmt = gsi_stmt (gsi);
1250 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1251 tree lhs, rhs, lhs_base;
1252 bool partial_overlap;
1253
1254 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1255 continue;
1256 if (!gimple_assign_single_p (stmt))
1257 break;
1258
1259 lhs = gimple_assign_lhs (stmt);
1260 rhs = gimple_assign_rhs1 (stmt);
1261 if (!is_gimple_reg_type (rhs))
1262 break;
1263
1264 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1265 &lhs_max_size);
1266 if (lhs_max_size == -1
1267 || lhs_max_size != lhs_size
1268 || (lhs_offset < arg_offset
1269 && lhs_offset + lhs_size > arg_offset)
1270 || (lhs_offset < arg_offset + arg_size
1271 && lhs_offset + lhs_size > arg_offset + arg_size))
1272 break;
1273
1274 if (check_ref)
1275 {
1276 if (TREE_CODE (lhs_base) != MEM_REF
1277 || TREE_OPERAND (lhs_base, 0) != arg_base
1278 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1279 break;
1280 }
1281 else if (lhs_base != arg_base)
1282 break;
1283
1284 if (lhs_offset + lhs_size < arg_offset
1285 || lhs_offset >= (arg_offset + arg_size))
1286 continue;
1287
1288 partial_overlap = false;
1289 p = &list;
1290 while (*p && (*p)->offset < lhs_offset)
1291 {
1292 if ((*p)->offset + (*p)->size > lhs_offset)
1293 {
1294 partial_overlap = true;
1295 break;
1296 }
1297 p = &(*p)->next;
1298 }
1299 if (partial_overlap)
1300 break;
1301 if (*p && (*p)->offset < lhs_offset + lhs_size)
1302 {
1303 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1304 /* We already know this value is subsequently overwritten with
1305 something else. */
1306 continue;
1307 else
1308 /* Otherwise this is a partial overlap which we cannot
1309 represent. */
1310 break;
1311 }
1312
1313 rhs = get_ssa_def_if_simple_copy (rhs);
1314 n = XALLOCA (struct ipa_known_agg_contents_list);
1315 n->size = lhs_size;
1316 n->offset = lhs_offset;
1317 if (is_gimple_ip_invariant (rhs))
1318 {
1319 n->constant = rhs;
1320 const_count++;
1321 }
1322 else
1323 n->constant = NULL_TREE;
1324 n->next = *p;
1325 *p = n;
1326
1327 item_count++;
1328 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1329 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1330 break;
1331 }
1332
1333 /* Third stage just goes over the list and creates an appropriate vector of
1334 ipa_agg_jf_item structures out of it, of sourse only if there are
1335 any known constants to begin with. */
1336
1337 if (const_count)
1338 {
1339 jfunc->agg.by_ref = by_ref;
1340 jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
1341 while (list)
1342 {
1343 if (list->constant)
1344 {
1345 struct ipa_agg_jf_item *item;
1346 item = VEC_quick_push (ipa_agg_jf_item_t,
1347 jfunc->agg.items, NULL);
1348 item->offset = list->offset - arg_offset;
1349 item->value = list->constant;
1350 }
1351 list = list->next;
1352 }
1353 }
1354 }
1355
1356 /* Compute jump function for all arguments of callsite CS and insert the
1357 information in the jump_functions array in the ipa_edge_args corresponding
1358 to this callsite. */
1359
static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  /* Nothing to do for argument-less calls or if the jump functions were
     already computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* The argument is an unmodified formal parameter; unless its
		 dynamic type may have changed, it is a simple
		 pass-through.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0
		  && !detect_type_change_ssa (arg, call, jfunc))
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
							call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      /* Derived value: try to recognize an arithmetic/assignment
		 pattern or the PHI-based ancestor type-cast pattern.  */
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
						    call, stmt);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call);

      /* If the aggregate contents are not already described as preserved by
	 a pass-through or ancestor jump function, try to determine known
	 constant parts of an aggregate passed by value or by reference.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
	      || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || (POINTER_TYPE_P (TREE_TYPE (arg)))))
	determine_known_aggregate_parts (call, arg, jfunc);
    }
}
1432
1433 /* Compute jump functions for all edges - both direct and indirect - outgoing
1434 from NODE. Also count the actual arguments in the process. */
1435
1436 static void
1437 ipa_compute_jump_functions (struct cgraph_node *node,
1438 struct param_analysis_info *parms_ainfo)
1439 {
1440 struct cgraph_edge *cs;
1441
1442 for (cs = node->callees; cs; cs = cs->next_callee)
1443 {
1444 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1445 NULL);
1446 /* We do not need to bother analyzing calls to unknown
1447 functions unless they may become known during lto/whopr. */
1448 if (!callee->analyzed && !flag_lto)
1449 continue;
1450 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1451 }
1452
1453 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1454 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1455 }
1456
1457 /* If STMT looks like a statement loading a value from a member pointer formal
1458 parameter, return that parameter and store the offset of the field to
1459 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1460 might be clobbered). If USE_DELTA, then we look for a use of the delta
1461 field rather than the pfn. */
1462
static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  /* Only simple single-rhs loads can match.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The load is either a COMPONENT_REF of a MEM_REF or a bare MEM_REF with
     an explicit offset.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a parameter whose type has the structure of a C++
     member pointer; fetch its pfn and delta fields.  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  /* Select which of the two fields the caller asked about.  */
  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  /* Finally, verify that the load actually reads the requested field:
     either through a matching COMPONENT_REF at offset zero, or through a
     MEM_REF offset equal to the field's byte position.  */
  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
1508
1509 /* Returns true iff T is an SSA_NAME defined by a statement. */
1510
1511 static bool
1512 ipa_is_ssa_with_stmt_def (tree t)
1513 {
1514 if (TREE_CODE (t) == SSA_NAME
1515 && !SSA_NAME_IS_DEFAULT_DEF (t))
1516 return true;
1517 else
1518 return false;
1519 }
1520
1521 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1522 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1523 indirect call graph edge. */
1524
1525 static struct cgraph_edge *
1526 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1527 {
1528 struct cgraph_edge *cs;
1529
1530 cs = cgraph_edge (node, stmt);
1531 cs->indirect_info->param_index = param_index;
1532 cs->indirect_info->offset = 0;
1533 cs->indirect_info->polymorphic = 0;
1534 cs->indirect_info->agg_contents = 0;
1535 return cs;
1536 }
1537
1538 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1539 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1540 intermediate information about each formal parameter. Currently it checks
1541 whether the call calls a pointer that is a formal parameter and if so, the
1542 parameter is marked with the called flag and an indirect call graph edge
1543 describing the call is created. This is very simple for ordinary pointers
1544 represented in SSA but not-so-nice when it comes to member pointers. The
1545 ugly part of this function does nothing more than trying to match the
1546 pattern of such a call. An example of such a pattern is the gimple dump
1547 below, the call is on the last line:
1548
1549 <bb 2>:
1550 f$__delta_5 = f.__delta;
1551 f$__pfn_24 = f.__pfn;
1552
1553 or
1554 <bb 2>:
1555 f$__delta_5 = MEM[(struct *)&f];
1556 f$__pfn_24 = MEM[(struct *)&f + 4B];
1557
1558 and a few lines below:
1559
1560 <bb 5>
1561 D.2496_3 = (int) f$__pfn_24;
1562 D.2497_4 = D.2496_3 & 1;
1563 if (D.2497_4 != 0)
1564 goto <bb 3>;
1565 else
1566 goto <bb 4>;
1567
1568 <bb 6>:
1569 D.2500_7 = (unsigned int) f$__delta_5;
1570 D.2501_8 = &S + D.2500_7;
1571 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1572 D.2503_10 = *D.2502_9;
1573 D.2504_12 = f$__pfn_24 + -1;
1574 D.2505_13 = (unsigned int) D.2504_12;
1575 D.2506_14 = D.2503_10 + D.2505_13;
1576 D.2507_15 = *D.2506_14;
1577 iftmp.11_16 = (String:: *) D.2507_15;
1578
1579 <bb 7>:
1580 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1581 D.2500_19 = (unsigned int) f$__delta_5;
1582 D.2508_20 = &S + D.2500_19;
1583 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1584
1585 Such patterns are results of simple calls to a member pointer:
1586
1587 int doprinting (int (MyString::* f)(int) const)
1588 {
1589 MyString S ("somestring");
1590
1591 return (S.*f)(4);
1592 }
1593
1594 Moreover, the function also looks for called pointers loaded from aggregates
1595 passed by value or reference. */
1596
static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_ainfo,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Simplest case: the call target is the unmodified value of a formal
     parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  /* Second case: the target is loaded from an aggregate passed to this
     function (by value or by reference).  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info, parms_ainfo, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one of the PHI arguments may be a pfn load; the other must
	 be the virtual-dispatch computation.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  /* The condition operand must be a masked value with a defining
     statement.  */
  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* It must be a BIT_AND_EXPR with constant 1 (testing the LSB).  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Optionally look through one conversion of the tested value.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be a load of the pfn (or delta, depending on the
     target's vbit location) from the very same member-pointer
     parameter.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  /* Only record the call if the member-pointer parameter is not modified
     before the call statement.  */
  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
    }

  return;
}
1732
1733 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1734 object referenced in the expression is a formal parameter of the caller
1735 (described by INFO), create a call note for the statement. */
1736
static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
			       struct ipa_node_params *info, gimple call,
			       tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  /* The object is either the unmodified value of a formal parameter...  */
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
	return;
    }
  /* ...or the address of an ancestor part of such a parameter.  */
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
	return;
    }

  /* Record the polymorphic call details on the indirect edge.  */
  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
1788
1789 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1790 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1791 containing intermediate information about each formal parameter. */
1792
1793 static void
1794 ipa_analyze_call_uses (struct cgraph_node *node,
1795 struct ipa_node_params *info,
1796 struct param_analysis_info *parms_ainfo, gimple call)
1797 {
1798 tree target = gimple_call_fn (call);
1799
1800 if (!target)
1801 return;
1802 if (TREE_CODE (target) == SSA_NAME)
1803 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1804 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1805 ipa_analyze_virtual_call_uses (node, info, call, target);
1806 }
1807
1808
1809 /* Analyze the call statement STMT with respect to formal parameters (described
1810 in INFO) of caller given by NODE. Currently it only checks whether formal
1811 parameters are called. PARMS_AINFO is a pointer to a vector containing
1812 intermediate information about each formal parameter. */
1813
1814 static void
1815 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1816 struct param_analysis_info *parms_ainfo, gimple stmt)
1817 {
1818 if (is_gimple_call (stmt))
1819 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1820 }
1821
1822 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1823 If OP is a parameter declaration, mark it as used in the info structure
1824 passed in DATA. */
1825
1826 static bool
1827 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1828 tree op, void *data)
1829 {
1830 struct ipa_node_params *info = (struct ipa_node_params *) data;
1831
1832 op = get_base_address (op);
1833 if (op
1834 && TREE_CODE (op) == PARM_DECL)
1835 {
1836 int index = ipa_get_param_decl_index (info, op);
1837 gcc_assert (index >= 0);
1838 ipa_set_param_used (info, index, true);
1839 }
1840
1841 return false;
1842 }
1843
1844 /* Scan the function body of NODE and inspect the uses of formal parameters.
1845 Store the findings in various structures of the associated ipa_node_params
1846 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1847 vector containing intermediate information about each formal parameter. */
1848
static void
ipa_analyze_params_uses (struct cgraph_node *node,
			 struct param_analysis_info *parms_ainfo)
{
  tree decl = node->symbol.decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  /* Nothing to do without parameters; do not repeat the analysis.  */
  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      tree ddef;
      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm)
	  && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
				      parm)) != NULL_TREE
	  && !has_zero_uses (ddef))
	ipa_set_param_used (info, i, true);
    }

  /* Walk all statements (skipping debug statements) and all PHI nodes,
     analyzing call uses and marking any referenced parameters as used.  */
  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (is_gimple_debug (stmt))
	    continue;

	  ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
	  walk_stmt_load_store_addr_ops (stmt, info,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis);
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis);
    }

  info->uses_analysis_done = 1;
}
1901
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */
1905
void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_ainfo;
  int i, param_count;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  /* Make NODE's function the current one for the duration of the
     analysis.  */
  push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
  current_function_decl = node->symbol.decl;
  ipa_initialize_node_params (node);

  /* Scratch per-parameter info, zero-initialized, freed below.  */
  param_count = ipa_get_param_count (info);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_ainfo);
  ipa_compute_jump_functions (node, parms_ainfo);

  /* Release the bitmaps accumulated during the analysis.  */
  for (i = 0; i < param_count; i++)
    {
      if (parms_ainfo[i].parm_visited_statements)
	BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
      if (parms_ainfo[i].pt_visited_statements)
	BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
    }

  current_function_decl = NULL;
  pop_cfun ();
}
1938
1939
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and src of known
   type.  */
1943
1944 static void
1945 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1946 struct ipa_jump_func *dst)
1947 {
1948 HOST_WIDE_INT combined_offset;
1949 tree combined_type;
1950
1951 combined_offset = ipa_get_jf_known_type_offset (src)
1952 + ipa_get_jf_ancestor_offset (dst);
1953 combined_type = ipa_get_jf_ancestor_type (dst);
1954
1955 ipa_set_jf_known_type (dst, combined_offset,
1956 ipa_get_jf_known_type_base_type (src),
1957 combined_type);
1958 }
1959
1960 /* Update the jump functions associated with call graph edge E when the call
1961 graph edge CS is being inlined, assuming that E->caller is already (possibly
1962 indirectly) inlined into CS->callee and that E has not been inlined. */
1963
static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  /* Propagate aggregate items from the outer function's jump
	     function, re-basing their offsets by the ancestor offset.  */
	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  /* Combine the ancestor with what is known about the outer
	     argument: known type, simple pass-through, or another
	     ancestor.  Anything else makes the result unknown.  */
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      bool agg_p;
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      agg_p = dst->value.pass_through.agg_preserved;

	      /* A simple pass-through simply inherits the outer argument's
		 jump function wholesale.  */
	      dst->type = src->type;
	      dst->value = src->value;

	      if (src->agg.items
		  && (agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
					     src->agg.items);
		}

	      /* If the inner pass-through did not preserve aggregate
		 contents, the combined function must not claim so
		 either.  */
	      if (!agg_p)
		{
		  if (dst->type == IPA_JF_PASS_THROUGH)
		    dst->value.pass_through.agg_preserved = false;
		  else if (dst->type == IPA_JF_ANCESTOR)
		    dst->value.ancestor.agg_preserved = false;
		}
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
2070
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;

  /* Strip a possible ADDR_EXPR wrapper; only a bare FUNCTION_DECL can be
     turned into a direct call target.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    return NULL;
  callee = cgraph_get_node (target);
  if (!callee)
    return NULL;
  /* Make sure the ipa_node_params vector exists before other IPA machinery
     touches the newly directed edge.  */
  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  cgraph_make_edge_direct (ie, callee);
  if (dump_file)
    {
      /* xstrdup both names because cgraph_node_name may reuse an internal
	 buffer between the two calls.  */
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
	       xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  /* NOTE(review): the result of this call is never used afterwards; looks
     like a leftover or preparation for future use -- confirm.  */
  callee = cgraph_function_or_thunk_node (callee, NULL);

  return ie;
}
2109
2110 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2111 return NULL if there is not any. BY_REF specifies whether the value has to
2112 be passed by reference or by value. */
2113
2114 tree
2115 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2116 HOST_WIDE_INT offset, bool by_ref)
2117 {
2118 struct ipa_agg_jf_item *item;
2119 int i;
2120
2121 if (by_ref != agg->by_ref)
2122 return NULL;
2123
2124 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2125 {
2126 if (item->offset == offset)
2127 {
2128 /* Currently we do not have clobber values, return NULL for them once
2129 we do. */
2130 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2131 return item->value;
2132 }
2133 else if (item->offset > offset)
2134 return NULL;
2135 }
2136 return NULL;
2137 }
2138
2139 /* Try to find a destination for indirect edge IE that corresponds to a simple
2140 call or a call of a member function pointer and where the destination is a
2141 pointer formal parameter described by jump function JFUNC. If it can be
2142 determined, return the newly direct edge, otherwise return NULL. */
2143
2144 static struct cgraph_edge *
2145 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2146 struct ipa_jump_func *jfunc)
2147 {
2148 tree target;
2149
2150 if (ie->indirect_info->agg_contents)
2151 {
2152 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2153 ie->indirect_info->offset,
2154 ie->indirect_info->by_ref);
2155 if (!target)
2156 return NULL;
2157 }
2158 else
2159 {
2160 if (jfunc->type != IPA_JF_CONST)
2161 return NULL;
2162 target = ipa_get_jf_constant (jfunc);
2163 }
2164 return ipa_make_edge_direct_to_target (ie, target);
2165 }
2166
2167 /* Try to find a destination for indirect edge IE that corresponds to a
2168 virtual call based on a formal parameter which is described by jump
2169 function JFUNC and if it can be determined, make it direct and return the
2170 direct edge. Otherwise, return NULL. */
2171
2172 static struct cgraph_edge *
2173 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2174 struct ipa_jump_func *jfunc)
2175 {
2176 tree binfo, target;
2177
2178 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2179 return NULL;
2180
2181 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2182 gcc_checking_assert (binfo);
2183 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2184 + ie->indirect_info->offset,
2185 ie->indirect_info->otr_type);
2186 if (binfo)
2187 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2188 binfo);
2189 else
2190 return NULL;
2191
2192 if (target)
2193 return ipa_make_edge_direct_to_target (ie, target);
2194 else
2195 return NULL;
2196 }
2197
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      VEC (cgraph_edge_p, heap) **new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* Remember the successor now; IE may be made direct below, which
	 unlinks it from the indirect call list.  */
      next_ie = ie->next_callee;

      /* -1 means the target does not depend on any formal parameter.  */
      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      /* Re-express the parameter dependency in terms of CS's caller by
	 composing it with the jump function of the corresponding argument.  */
      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  /* Aggregate contents are only usable if the pass-through preserved
	     the aggregate.  */
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	    ici->param_index = -1;
	  else
	    {
	      /* The ancestor shifts the base by a constant offset.  */
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;

      if (!flag_indirect_inlining)
	continue;

      if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);

      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
						   new_direct_edge->callee->symbol.decl);
	  if (new_edges)
	    {
	      VEC_safe_push (cgraph_edge_p, heap, *new_edges,
			     new_direct_edge);
	      /* Recompute TOP: creating the new edge may have grown the
		 underlying edge-args vector and invalidated the cached
		 pointer -- presumably; confirm against
		 ipa_check_create_edge_args.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
    }

  return res;
}
2287
2288 /* Recursively traverse subtree of NODE (including node) made of inlined
2289 cgraph_edges when CS has been inlined and invoke
2290 update_indirect_edges_after_inlining on all nodes and
2291 update_jump_functions_after_inlining on all non-inlined edges that lead out
2292 of this subtree. Newly discovered indirect edges will be added to
2293 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2294 created. */
2295
2296 static bool
2297 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2298 struct cgraph_node *node,
2299 VEC (cgraph_edge_p, heap) **new_edges)
2300 {
2301 struct cgraph_edge *e;
2302 bool res;
2303
2304 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2305
2306 for (e = node->callees; e; e = e->next_callee)
2307 if (!e->inline_failed)
2308 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2309 else
2310 update_jump_functions_after_inlining (cs, e);
2311 for (e = node->indirect_calls; e; e = e->next_callee)
2312 update_jump_functions_after_inlining (cs, e);
2313
2314 return res;
2315 }
2316
2317 /* Update jump functions and call note functions on inlining the call site CS.
2318 CS is expected to lead to a node already cloned by
2319 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2320 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2321 created. */
2322
2323 bool
2324 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2325 VEC (cgraph_edge_p, heap) **new_edges)
2326 {
2327 bool changed;
2328 /* Do nothing if the preparation phase has not been carried out yet
2329 (i.e. during early inlining). */
2330 if (!ipa_node_params_vector)
2331 return false;
2332 gcc_assert (ipa_edge_args_vector);
2333
2334 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2335
2336 /* We do not keep jump functions of inlined edges up to date. Better to free
2337 them so we do not access them accidentally. */
2338 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2339 return changed;
2340 }
2341
2342 /* Frees all dynamically allocated structures that the argument info points
2343 to. */
2344
2345 void
2346 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2347 {
2348 if (args->jump_functions)
2349 ggc_free (args->jump_functions);
2350
2351 memset (args, 0, sizeof (*args));
2352 }
2353
2354 /* Free all ipa_edge structures. */
2355
2356 void
2357 ipa_free_all_edge_args (void)
2358 {
2359 int i;
2360 struct ipa_edge_args *args;
2361
2362 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2363 ipa_free_edge_args_substructures (args);
2364
2365 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2366 ipa_edge_args_vector = NULL;
2367 }
2368
2369 /* Frees all dynamically allocated structures that the param info points
2370 to. */
2371
2372 void
2373 ipa_free_node_params_substructures (struct ipa_node_params *info)
2374 {
2375 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2376 free (info->lattices);
2377 /* Lattice values and their sources are deallocated with their alocation
2378 pool. */
2379 VEC_free (tree, heap, info->known_vals);
2380 memset (info, 0, sizeof (*info));
2381 }
2382
2383 /* Free all ipa_node_params structures. */
2384
2385 void
2386 ipa_free_all_node_params (void)
2387 {
2388 int i;
2389 struct ipa_node_params *info;
2390
2391 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2392 ipa_free_node_params_substructures (info);
2393
2394 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2395 ipa_node_params_vector = NULL;
2396 }
2397
2398 /* Hook that is called by cgraph.c when an edge is removed. */
2399
2400 static void
2401 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2402 {
2403 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2404 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2405 <= (unsigned)cs->uid)
2406 return;
2407 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2408 }
2409
2410 /* Hook that is called by cgraph.c when a node is removed. */
2411
2412 static void
2413 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2414 {
2415 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2416 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2417 <= (unsigned)node->uid)
2418 return;
2419 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2420 }
2421
/* Hook that is called by cgraph.c when an edge is duplicated.  Copies the
   jump functions of SRC's edge args to DST's, deep-copying the aggregate
   item vectors so the two edges do not share them.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  /* VEC_copy duplicates the jump functions but the copies still share the
     agg.items vectors with the originals...  */
  new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
				       old_args->jump_functions);

  /* ...so give each copied jump function its own copy of the aggregate
     items (VEC_index yields an lvalue here).  */
  for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
    VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
      = VEC_copy (ipa_agg_jf_item_t, gc,
		  VEC_index (ipa_jump_func_t,
			     old_args->jump_functions, i).agg.items);
}
2445
2446 /* Hook that is called by cgraph.c when a node is duplicated. */
2447
2448 static void
2449 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2450 ATTRIBUTE_UNUSED void *data)
2451 {
2452 struct ipa_node_params *old_info, *new_info;
2453
2454 ipa_check_create_node_params ();
2455 old_info = IPA_NODE_REF (src);
2456 new_info = IPA_NODE_REF (dst);
2457
2458 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2459 old_info->descriptors);
2460 new_info->lattices = NULL;
2461 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2462
2463 new_info->uses_analysis_done = old_info->uses_analysis_done;
2464 new_info->node_enqueued = old_info->node_enqueued;
2465 }
2466
2467
/* Analyze newly added function into callgraph.  Thin adapter matching the
   cgraph function-insertion hook signature; DATA is unused.  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  ipa_analyze_node (node);
}
2475
2476 /* Register our cgraph hooks if they are not already there. */
2477
2478 void
2479 ipa_register_cgraph_hooks (void)
2480 {
2481 if (!edge_removal_hook_holder)
2482 edge_removal_hook_holder =
2483 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2484 if (!node_removal_hook_holder)
2485 node_removal_hook_holder =
2486 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2487 if (!edge_duplication_hook_holder)
2488 edge_duplication_hook_holder =
2489 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2490 if (!node_duplication_hook_holder)
2491 node_duplication_hook_holder =
2492 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2493 function_insertion_hook_holder =
2494 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2495 }
2496
2497 /* Unregister our cgraph hooks if they are not already there. */
2498
2499 static void
2500 ipa_unregister_cgraph_hooks (void)
2501 {
2502 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2503 edge_removal_hook_holder = NULL;
2504 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2505 node_removal_hook_holder = NULL;
2506 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2507 edge_duplication_hook_holder = NULL;
2508 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2509 node_duplication_hook_holder = NULL;
2510 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2511 function_insertion_hook_holder = NULL;
2512 }
2513
2514 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2515 longer needed after ipa-cp. */
2516
2517 void
2518 ipa_free_all_structures_after_ipa_cp (void)
2519 {
2520 if (!optimize)
2521 {
2522 ipa_free_all_edge_args ();
2523 ipa_free_all_node_params ();
2524 free_alloc_pool (ipcp_sources_pool);
2525 free_alloc_pool (ipcp_values_pool);
2526 ipa_unregister_cgraph_hooks ();
2527 }
2528 }
2529
2530 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2531 longer needed after indirect inlining. */
2532
2533 void
2534 ipa_free_all_structures_after_iinln (void)
2535 {
2536 ipa_free_all_edge_args ();
2537 ipa_free_all_node_params ();
2538 ipa_unregister_cgraph_hooks ();
2539 if (ipcp_sources_pool)
2540 free_alloc_pool (ipcp_sources_pool);
2541 if (ipcp_values_pool)
2542 free_alloc_pool (ipcp_values_pool);
2543 }
2544
2545 /* Print ipa_tree_map data structures of all functions in the
2546 callgraph to F. */
2547
2548 void
2549 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2550 {
2551 int i, count;
2552 tree temp;
2553 struct ipa_node_params *info;
2554
2555 if (!node->analyzed)
2556 return;
2557 info = IPA_NODE_REF (node);
2558 fprintf (f, " function %s parameter descriptors:\n",
2559 cgraph_node_name (node));
2560 count = ipa_get_param_count (info);
2561 for (i = 0; i < count; i++)
2562 {
2563 temp = ipa_get_param (info, i);
2564 if (TREE_CODE (temp) == PARM_DECL)
2565 fprintf (f, " param %d : %s", i,
2566 (DECL_NAME (temp)
2567 ? (*lang_hooks.decl_printable_name) (temp, 2)
2568 : "(unnamed)"));
2569 if (ipa_is_param_used (info, i))
2570 fprintf (f, " used");
2571 fprintf (f, "\n");
2572 }
2573 }
2574
2575 /* Print ipa_tree_map data structures of all functions in the
2576 callgraph to F. */
2577
2578 void
2579 ipa_print_all_params (FILE * f)
2580 {
2581 struct cgraph_node *node;
2582
2583 fprintf (f, "\nFunction parameters:\n");
2584 FOR_EACH_FUNCTION (node)
2585 ipa_print_node_params (f, node);
2586 }
2587
2588 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2589
2590 VEC(tree, heap) *
2591 ipa_get_vector_of_formal_parms (tree fndecl)
2592 {
2593 VEC(tree, heap) *args;
2594 int count;
2595 tree parm;
2596
2597 count = count_formal_params (fndecl);
2598 args = VEC_alloc (tree, heap, count);
2599 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2600 VEC_quick_push (tree, args, parm);
2601
2602 return args;
2603 }
2604
2605 /* Return a heap allocated vector containing types of formal parameters of
2606 function type FNTYPE. */
2607
2608 static inline VEC(tree, heap) *
2609 get_vector_of_formal_parm_types (tree fntype)
2610 {
2611 VEC(tree, heap) *types;
2612 int count = 0;
2613 tree t;
2614
2615 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2616 count++;
2617
2618 types = VEC_alloc (tree, heap, count);
2619 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2620 VEC_quick_push (tree, types, TREE_VALUE (t));
2621
2622 return types;
2623 }
2624
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  Synthesized parameters are named with SYNTH_PARM_PREFIX
   (or "SYNTH" when it is NULL).  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  VEC(tree, heap) *oparms, *otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      /* A trailing void in the type list marks a non-varargs function; the
	 parameter and type vectors then differ in length by one.  */
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
      else
	gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
    }
  else
    {
      last_parm_void = false;
      otypes = NULL;
    }

  /* Rebuild DECL_ARGUMENTS through LINK while accumulating the new argument
     type list (in reverse) in NEW_ARG_TYPES.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
      parm = VEC_index (tree, oparms, adj->base_index);
      adj->base = parm;

      if (adj->copy_param)
	{
	  /* Parameter kept verbatim: relink the original PARM_DECL.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
							     adj->base_index),
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  /* Replacement parameter: synthesize a fresh PARM_DECL of the
	     adjusted (possibly pointer-wrapped) type.  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &DECL_CHAIN (new_parm);
	}
      /* Removed parameters simply do not get relinked.  */
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      /* NEW_ARG_TYPES was built backwards; reverse it and restore the
	 trailing void marker if the original had one.  */
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
       || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
	 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  if (otypes)
    VEC_free (tree, heap, otypes);
  VEC_free (tree, heap, oparms);
}
2782
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  The original call STMT is
   replaced with a newly built call; SSA form and dominance info are
   invalidated/updated at the end.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  VEC(tree, heap) *vargs;
  VEC(tree, gc) **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  tree callee_decl;
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  vargs = VEC_alloc (tree, heap, len);
  /* For a directly recursive call (CS == NULL) take the callee from the
     statement itself.  */
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;

  gsi = gsi_for_stmt (stmt);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->copy_param)
	{
	  /* Argument kept unchanged: pass the original actual through.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  VEC_quick_push (tree, vargs, arg);
	}
      else if (!adj->remove_param)
	{
	  tree expr, base, off;
	  location_t loc;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* BASE is already a pointer; the offset becomes the MEM_REF
	       displacement.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;

	      if (TREE_CODE (base) == ADDR_EXPR)
		base = TREE_OPERAND (base, 0);
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  /* Fold the extracted constant offset into the existing
		     MEM_REF displacement.  */
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      /* Compute the alignment actually guaranteed at BASE + OFF and
		 weaken the access type's alignment if necessary.  */
	      get_pointer_alignment_1 (base, &align, &misalign);
	      misalign += (tree_to_double_int (off)
			   .sext (TYPE_PRECISION (TREE_TYPE (off))).low
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	    }
	  else
	    {
	      /* By-reference: pass the address of the piece.  */
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	    }

	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  VEC_quick_push (tree, vargs, expr);
	}
      if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
	{
	  /* Bind the disappearing original argument to a debug decl so its
	     value stays visible to the debugger.  */
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* DEBUG_ARGS holds (origin, debug-decl) pairs; look for an
	     existing entry for ORIGIN.  */
	  for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = VEC_index (tree, *debug_args, ix + 1);
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      VEC_safe_push (tree, gc, *debug_args, origin);
	      VEC_safe_push (tree, gc, *debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
					      stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  /* Build the replacement call and carry over lhs, block, location, static
     chain and flags from the original statement.  */
  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
2977
2978 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2979
2980 static bool
2981 index_in_adjustments_multiple_times_p (int base_index,
2982 ipa_parm_adjustment_vec adjustments)
2983 {
2984 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2985 bool one = false;
2986
2987 for (i = 0; i < len; i++)
2988 {
2989 struct ipa_parm_adjustment *adj;
2990 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2991
2992 if (adj->base_index == base_index)
2993 {
2994 if (one)
2995 return true;
2996 else
2997 one = true;
2998 }
2999 }
3000 return false;
3001 }
3002
3003
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
  int inlen = VEC_length (ipa_parm_adjustment_t, inner);
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP holds the surviving (non-removed) inner adjustments; OUTER's
     base_index values index into this filtered sequence.  */
  tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &VEC_index (ipa_parm_adjustment_t, inner, i);

      if (n->remove_param)
	removals++;
      else
	VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
    }

  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment *r;
      struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
						    outer, i);
      struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
						   out->base_index);

      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* An outer removal becomes a removal of the original parameter,
	     but only once per original parameter.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      /* quick_push with NULL reserves a slot and returns its
		 address.  */
	      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (r, 0, sizeof (*r));
	      r->remove_param = true;
	    }
	  continue;
	}

      /* Compose the two adjustments: the result is based on the original
	 parameter, typed by the outer adjustment.  */
      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
      memset (r, 0, sizeof (*r));
      r->base_index = in->base_index;
      r->type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->copy_param && out->copy_param)
	r->copy_param = true;
      else if (in->copy_param)
	r->offset = out->offset;
      else if (out->copy_param)
	r->offset = in->offset;
      else
	/* Both adjustments offset into the parameter; offsets add up.  */
	r->offset = in->offset + out->offset;
    }

  /* Inner removals are invisible to OUTER; re-append them unchanged.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
						  inner, i);

      if (n->remove_param)
	VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
    }

  VEC_free (ipa_parm_adjustment_t, heap, tmp);
  return adjustments;
}
3079
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  bool first = true;
  VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);

      /* Align continuation lines under the header printed above.  */
      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      /* Every entry names the original parameter it is based on.  */
      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->reduction)
	{
	  fprintf (file, ", reduction: ");
	  print_generic_expr (file, adj->reduction, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      /* copy_param, remove_param and offset-based replacement are mutually
	 exclusive kinds of adjustment.  */
      if (adj->copy_param)
	fprintf (file, ", copy_param");
      else if (adj->remove_param)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  VEC_free (tree, heap, parms);
}
3133
/* Stream out jump function JUMP_FUNC to OB.  The layout must be kept in sync
   with ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  /* The type tag goes first so the reader knows which union members
     follow.  */
  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      stream_write_tree (ob, jump_func->value.constant, true);
      break;
    case IPA_JF_PASS_THROUGH:
      stream_write_tree (ob, jump_func->value.pass_through.operand, true);
      streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate part: item count, then (only when nonzero) the by_ref flag,
     then the offset/value pairs.  */
  count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
3190
/* Read in jump function JUMP_FUNC from IB.  The layout must mirror
   ipa_write_jump_function.  DATA_IN is used to read trees.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct data_in *data_in)
{
  struct bitpack_d bp;
  int i, count;

  /* Type tag first, then the type-specific payload.  */
  jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* No payload.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      jump_func->value.known_type.offset = streamer_read_uhwi (ib);
      jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
      jump_func->value.known_type.component_type = stream_read_tree (ib,
								     data_in);
      break;
    case IPA_JF_CONST:
      jump_func->value.constant = stream_read_tree (ib, data_in);
      break;
    case IPA_JF_PASS_THROUGH:
      jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
      jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
      jump_func->value.pass_through.operation
	= (enum tree_code) streamer_read_uhwi (ib);
      bp = streamer_read_bitpack (ib);
      jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
      break;
    case IPA_JF_ANCESTOR:
      jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
      jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
      jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
      bp = streamer_read_bitpack (ib);
      jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
      break;
    }

  /* Aggregate part: count, the by_ref flag (present only when count is
     nonzero), then the items themselves.  */
  count = streamer_read_uhwi (ib);
  jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
  if (count)
    {
      bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item *item = VEC_quick_push (ipa_agg_jf_item_t,
						     jump_func->agg.items, NULL);

      item->offset = streamer_read_uhwi (ib);
      item->value = stream_read_tree (ib, data_in);
    }
}
3248
3249 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3250 relevant to indirect inlining to OB. */
3251
3252 static void
3253 ipa_write_indirect_edge_info (struct output_block *ob,
3254 struct cgraph_edge *cs)
3255 {
3256 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3257 struct bitpack_d bp;
3258
3259 streamer_write_hwi (ob, ii->param_index);
3260 streamer_write_hwi (ob, ii->offset);
3261 bp = bitpack_create (ob->main_stream);
3262 bp_pack_value (&bp, ii->polymorphic, 1);
3263 bp_pack_value (&bp, ii->agg_contents, 1);
3264 bp_pack_value (&bp, ii->by_ref, 1);
3265 streamer_write_bitpack (&bp);
3266
3267 if (ii->polymorphic)
3268 {
3269 streamer_write_hwi (ob, ii->otr_token);
3270 stream_write_tree (ob, ii->otr_type, true);
3271 }
3272 }
3273
3274 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3275 relevant to indirect inlining from IB. */
3276
3277 static void
3278 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3279 struct data_in *data_in ATTRIBUTE_UNUSED,
3280 struct cgraph_edge *cs)
3281 {
3282 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3283 struct bitpack_d bp;
3284
3285 ii->param_index = (int) streamer_read_hwi (ib);
3286 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3287 bp = streamer_read_bitpack (ib);
3288 ii->polymorphic = bp_unpack_value (&bp, 1);
3289 ii->agg_contents = bp_unpack_value (&bp, 1);
3290 ii->by_ref = bp_unpack_value (&bp, 1);
3291 if (ii->polymorphic)
3292 {
3293 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3294 ii->otr_type = stream_read_tree (ib, data_in);
3295 }
3296 }
3297
/* Stream out NODE info to OB.  Counterpart of ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  /* Reference the node through the symtab encoder so that the reader can
     map it back to its cgraph_node.  */
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
  streamer_write_uhwi (ob, node_ref);

  bp = bitpack_create (ob->main_stream);
  /* Parameter analysis must have run (unless there are no parameters) and
     the node must be in a steady state before streaming.  */
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  /* One "used" bit per formal parameter.  */
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  /* For every direct call edge, stream the argument count followed by the
     jump function of each argument.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  /* Indirect edges additionally carry their indirect call info.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}
3340
/* Stream in NODE info from IB.  The layout must mirror
   ipa_write_node_info.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  /* Create the parameter descriptors for NODE before filling them in.  */
  ipa_initialize_node_params (node);

  bp = streamer_read_bitpack (ib);
  /* The writer asserted uses_analysis_done (or a zero parameter count);
     restore that state here.  */
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  /* One "used" bit per formal parameter, as packed by the writer.  */
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  /* Direct call edges: argument count, then one jump function per
     argument.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
	continue;
      VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
    }
  /* Indirect edges additionally carry their indirect call info.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
				 count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
				    data_in);
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
3388
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream if IPA parameter analysis was never performed.  */
  if (!ipa_node_params_vector)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  /* First pass: count nodes with gimple bodies and analysis data, so the
     reader knows how many node records follow.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  /* Terminate the section and emit it.  */
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
3431
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  /* Set up an input block over the main stream of the section.  */
  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  /* Number of node records, as written by ipa_prop_write_jump_functions.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      /* Map the streamed node reference back to its cgraph_node.  */
      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
3472
3473 /* Read ipcp jump functions. */
3474
3475 void
3476 ipa_prop_read_jump_functions (void)
3477 {
3478 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3479 struct lto_file_decl_data *file_data;
3480 unsigned int j = 0;
3481
3482 ipa_check_create_node_params ();
3483 ipa_check_create_edge_args ();
3484 ipa_register_cgraph_hooks ();
3485
3486 while ((file_data = file_data_vec[j++]))
3487 {
3488 size_t len;
3489 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3490
3491 if (data)
3492 ipa_prop_read_section (file_data, data, len);
3493 }
3494 }
3495
3496 /* After merging units, we can get mismatch in argument counts.
3497 Also decl merging might've rendered parameter lists obsolete.
3498 Also compute called_with_variable_arg info. */
3499
3500 void
3501 ipa_update_after_lto_read (void)
3502 {
3503 struct cgraph_node *node;
3504
3505 ipa_check_create_node_params ();
3506 ipa_check_create_edge_args ();
3507
3508 FOR_EACH_DEFINED_FUNCTION (node)
3509 if (node->analyzed)
3510 ipa_initialize_node_params (node);
3511 }