/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

/* Return index of the formal whose tree is PTREE in the function described by
   DESCRIPTORS.  Return -1 if it is not found.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Print information about parameter number I of INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->symbol.decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   tree_code_name[(int) jump_func->value.pass_through.operation]);
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f, jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_low_cst (TYPE_SIZE (item->value), 1));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
           node->symbol.order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (cgraph_node_name (node)), node->symbol.order,
               xstrdup (cgraph_node_name (cs->callee)),
               cs->callee->symbol.order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i",
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
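
/* An illustrative sketch (not from the original sources): given a caller

     void caller (int x, struct B *b)
     {
       callee (x, x + 4, 7, &b->some_base_field);
     }

   the four arguments would typically be described, in order, by a simple
   pass-through jump function, an arithmetic pass-through (PLUS_EXPR with
   operand 4), a constant jump function, and an ancestor jump function
   carrying the offset of the referenced field within struct B.  */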

/* Extract the actual BINFO being described by JFUNC, which must be a known
   type jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
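
/* For illustration (an assumption-based sketch, not part of the original
   sources): in

     struct A { virtual void f (); };
     struct B : A { B () {} virtual void f (); };

   the compiled constructor of B first calls the constructor of A, then
   stores the address of B's virtual table into the VMT pointer of *this,
   and only then executes user code, matching the three sections described
   in the comment above.  */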

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table can be identified, return that type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* Like detect_type_change but with extra argument COMP_TYPE which will become
   the component type part of the new JFUNC if a dynamic type change is
   detected and the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
                      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
   looking for assignments to its virtual table pointer.  If it has, return
   true and fill in the jump function JFUNC with relevant type information or
   set it to unknown.  ARG is the object itself (not a pointer to it, unless
   dereferenced).  BASE is the base of the memory access as returned by
   get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  comp_type = TREE_TYPE (TREE_TYPE (arg));
  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Set the boolean variable pointed to by
   DATA to true, flagging that the callback has been invoked.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
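
/* For example (an illustrative sketch): for an addressable parameter a and
   the sequence

     a.0_2 = a;
     bar (a.0_2);

   the walk over the virtual definitions between the load and the function
   entry finds no aliased store into a, so the load is considered preserved
   and a pass-through jump function can later be built for the argument of
   bar.  */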

/* If STMT is an assignment that loads a value from a parameter declaration
   and the parameter is known not to have been modified, return the index of
   the parameter in ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   the parameter.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      return true;
    }
  return false;
}

/* Just like the previous function, but without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, by_ref_p);
}
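
/* For illustration (a hypothetical caller of the function above): for a
   pointer parameter p and a load statement

     d_2 = p_1(D)->f;

   a successful call would set *INDEX_P to the index of p, *OFFSET_P to the
   bit offset of field f within the pointed-to record and *BY_REF_P to
   true.  */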

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters that is accessed at
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt)
               && !detect_type_change_ssa (tc_ssa, call, jfunc))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm));
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
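
/* For reference (an assumption based on the Itanium C++ ABI, not spelled out
   in this file): a member function pointer is lowered to a record of the
   shape

     struct { fn_type *__pfn; ptrdiff_t __delta; };

   where __pfn holds the function address (or an encoded vtable offset) and
   __delta is the adjustment applied to the this pointer.  This is the
   two-field structure the predicate above matches; the __pfn and __delta
   field names also appear in the gimple dumps quoted later in this file.  */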

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_low_cst (type_size, 1);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (rhs)
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}
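
/* An illustrative example (hypothetical code, not from the original
   sources): for a caller like

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk from the call finds the two constant stores, so the
   resulting aggregate jump function has by_ref set and records two items
   pairing the offsets of fields a and b with the constants 1 and 2.  */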

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             loop.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0
                  && !detect_type_change_ssa (arg, call, jfunc))
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || (POINTER_TYPE_P (TREE_TYPE (arg)))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->symbol.definition && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or

     <bb 2>:
     f$__delta_5 = MEM[(struct *)&f];
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   and a few lines below:

     <bb 5>
     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;
     if (D.2497_4 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 6>:
     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

1717 static void
1718 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1719 struct ipa_node_params *info,
1720 struct param_analysis_info *parms_ainfo,
1721 gimple call, tree target)
1722 {
1723 gimple def;
1724 tree n1, n2;
1725 gimple d1, d2;
1726 tree rec, rec2, cond;
1727 gimple branch;
1728 int index;
1729 basic_block bb, virt_bb, join;
1730 HOST_WIDE_INT offset;
1731 bool by_ref;
1732
1733 if (SSA_NAME_IS_DEFAULT_DEF (target))
1734 {
1735 tree var = SSA_NAME_VAR (target);
1736 index = ipa_get_param_decl_index (info, var);
1737 if (index >= 0)
1738 ipa_note_param_call (node, index, call);
1739 return;
1740 }
1741
1742 def = SSA_NAME_DEF_STMT (target);
1743 if (gimple_assign_single_p (def)
1744 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1745 gimple_assign_rhs1 (def), &index, &offset,
1746 &by_ref))
1747 {
1748 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1749 cs->indirect_info->offset = offset;
1750 cs->indirect_info->agg_contents = 1;
1751 cs->indirect_info->by_ref = by_ref;
1752 return;
1753 }
1754
1755 /* Now we need to try to match the complex pattern of calling a member
1756 pointer. */
1757 if (gimple_code (def) != GIMPLE_PHI
1758 || gimple_phi_num_args (def) != 2
1759 || !POINTER_TYPE_P (TREE_TYPE (target))
1760 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1761 return;
1762
1763 /* First, we need to check whether one of these is a load from a member
1764 pointer that is a parameter to this function. */
1765 n1 = PHI_ARG_DEF (def, 0);
1766 n2 = PHI_ARG_DEF (def, 1);
1767 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1768 return;
1769 d1 = SSA_NAME_DEF_STMT (n1);
1770 d2 = SSA_NAME_DEF_STMT (n2);
1771
1772 join = gimple_bb (def);
1773 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1774 {
1775 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1776 return;
1777
1778 bb = EDGE_PRED (join, 0)->src;
1779 virt_bb = gimple_bb (d2);
1780 }
1781 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1782 {
1783 bb = EDGE_PRED (join, 1)->src;
1784 virt_bb = gimple_bb (d1);
1785 }
1786 else
1787 return;
1788
1789 /* Second, we need to check that the basic blocks are laid out in the way
1790 corresponding to the pattern. */
1791
1792 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1793 || single_pred (virt_bb) != bb
1794 || single_succ (virt_bb) != join)
1795 return;
1796
1797 /* Third, let's see that the branching is done depending on the least
1798 significant bit of the pfn, which in the usual member-pointer
representation marks virtual member functions (but see the
TARGET_PTRMEMFUNC_VBIT_LOCATION use below). */
1799
1800 branch = last_stmt (bb);
1801 if (!branch || gimple_code (branch) != GIMPLE_COND)
1802 return;
1803
1804 if ((gimple_cond_code (branch) != NE_EXPR
1805 && gimple_cond_code (branch) != EQ_EXPR)
1806 || !integer_zerop (gimple_cond_rhs (branch)))
1807 return;
1808
1809 cond = gimple_cond_lhs (branch);
1810 if (!ipa_is_ssa_with_stmt_def (cond))
1811 return;
1812
1813 def = SSA_NAME_DEF_STMT (cond);
1814 if (!is_gimple_assign (def)
1815 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1816 || !integer_onep (gimple_assign_rhs2 (def)))
1817 return;
1818
1819 cond = gimple_assign_rhs1 (def);
1820 if (!ipa_is_ssa_with_stmt_def (cond))
1821 return;
1822
1823 def = SSA_NAME_DEF_STMT (cond);
1824
1825 if (is_gimple_assign (def)
1826 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1827 {
1828 cond = gimple_assign_rhs1 (def);
1829 if (!ipa_is_ssa_with_stmt_def (cond))
1830 return;
1831 def = SSA_NAME_DEF_STMT (cond);
1832 }
1833
1834 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1835 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1836 == ptrmemfunc_vbit_in_delta),
1837 NULL);
1838 if (rec != rec2)
1839 return;
1840
1841 index = ipa_get_param_decl_index (info, rec);
1842 if (index >= 0
1843 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1844 {
1845 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1846 cs->indirect_info->offset = offset;
1847 cs->indirect_info->agg_contents = 1;
1848 cs->indirect_info->member_ptr = 1;
1849 }
1850
1851 return;
1852 }
1853
1854 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1855 object referenced in the expression is a formal parameter of the caller
1856 (described by INFO), create a call note for the statement. */
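/* For illustration, a hypothetical source pattern matched here is a
   virtual call made directly through a formal parameter:

     class A { public: virtual int foo (int); };

     int fun (A *obj)
     {
       return obj->foo (7);   // an OBJ_TYPE_REF whose object is OBJ
     }
*/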
1857
1858 static void
1859 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1860 struct ipa_node_params *info, gimple call,
1861 tree target)
1862 {
1863 struct cgraph_edge *cs;
1864 struct cgraph_indirect_call_info *ii;
1865 struct ipa_jump_func jfunc;
1866 tree obj = OBJ_TYPE_REF_OBJECT (target);
1867 int index;
1868 HOST_WIDE_INT anc_offset;
1869
1870 if (!flag_devirtualize)
1871 return;
1872
1873 if (TREE_CODE (obj) != SSA_NAME)
1874 return;
1875
1876 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1877 {
1878 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1879 return;
1880
1881 anc_offset = 0;
1882 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1883 gcc_assert (index >= 0);
1884 if (detect_type_change_ssa (obj, call, &jfunc))
1885 return;
1886 }
1887 else
1888 {
1889 gimple stmt = SSA_NAME_DEF_STMT (obj);
1890 tree expr;
1891
1892 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1893 if (!expr)
1894 return;
1895 index = ipa_get_param_decl_index (info,
1896 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1897 gcc_assert (index >= 0);
1898 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1899 return;
1900 }
1901
1902 cs = ipa_note_param_call (node, index, call);
1903 ii = cs->indirect_info;
1904 ii->offset = anc_offset;
1905 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1906 ii->otr_type = obj_type_ref_class (target);
1907 ii->polymorphic = 1;
1908 }
1909
1910 /* Analyze the call statement CALL to determine whether and how it utilizes formal parameters
1911 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1912 containing intermediate information about each formal parameter. */
1913
1914 static void
1915 ipa_analyze_call_uses (struct cgraph_node *node,
1916 struct ipa_node_params *info,
1917 struct param_analysis_info *parms_ainfo, gimple call)
1918 {
1919 tree target = gimple_call_fn (call);
1920
1921 if (!target)
1922 return;
1923 if (TREE_CODE (target) == SSA_NAME)
1924 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1925 else if (virtual_method_call_p (target))
1926 ipa_analyze_virtual_call_uses (node, info, call, target);
1927 }
1928
1929
1930 /* Analyze the statement STMT with respect to formal parameters (described
1931 in INFO) of the caller given by NODE. Currently it only checks whether formal
1932 parameters are called. PARMS_AINFO is a pointer to a vector containing
1933 intermediate information about each formal parameter. */
1934
1935 static void
1936 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1937 struct param_analysis_info *parms_ainfo, gimple stmt)
1938 {
1939 if (is_gimple_call (stmt))
1940 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1941 }
1942
1943 /* Callback of walk_stmt_load_store_addr_ops, used below for visit_load,
1944 visit_store and visit_addr alike. If OP is a parameter declaration,
1945 mark it as used in the info structure passed in DATA. */
1946
1947 static bool
1948 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1949 tree op, void *data)
1950 {
1951 struct ipa_node_params *info = (struct ipa_node_params *) data;
1952
1953 op = get_base_address (op);
1954 if (op
1955 && TREE_CODE (op) == PARM_DECL)
1956 {
1957 int index = ipa_get_param_decl_index (info, op);
1958 gcc_assert (index >= 0);
1959 ipa_set_param_used (info, index, true);
1960 }
1961
1962 return false;
1963 }
1964
1965 /* Scan the function body of NODE and inspect the uses of formal parameters.
1966 Store the findings in various fields of the associated ipa_node_params
1967 structure, such as parameter flags, notes etc. PARMS_AINFO points to a
1968 vector containing intermediate information about each formal parameter. */
1969
1970 static void
1971 ipa_analyze_params_uses (struct cgraph_node *node,
1972 struct param_analysis_info *parms_ainfo)
1973 {
1974 tree decl = node->symbol.decl;
1975 basic_block bb;
1976 struct function *func;
1977 gimple_stmt_iterator gsi;
1978 struct ipa_node_params *info = IPA_NODE_REF (node);
1979 int i;
1980
1981 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1982 return;
1983
1984 info->uses_analysis_done = 1;
1985 if (ipa_func_spec_opts_forbid_analysis_p (node))
1986 {
1987 for (i = 0; i < ipa_get_param_count (info); i++)
1988 {
1989 ipa_set_param_used (info, i, true);
1990 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
1991 }
1992 return;
1993 }
1994
1995 for (i = 0; i < ipa_get_param_count (info); i++)
1996 {
1997 tree parm = ipa_get_param (info, i);
1998 int controlled_uses = 0;
1999
2000 /* For SSA regs see if parameter is used. For non-SSA we compute
2001 the flag during modification analysis. */
2002 if (is_gimple_reg (parm))
2003 {
2004 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
2005 parm);
2006 if (ddef && !has_zero_uses (ddef))
2007 {
2008 imm_use_iterator imm_iter;
2009 use_operand_p use_p;
2010
2011 ipa_set_param_used (info, i, true);
2012 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2013 if (!is_gimple_call (USE_STMT (use_p)))
2014 {
2015 controlled_uses = IPA_UNDESCRIBED_USE;
2016 break;
2017 }
2018 else
2019 controlled_uses++;
2020 }
2021 else
2022 controlled_uses = 0;
2023 }
2024 else
2025 controlled_uses = IPA_UNDESCRIBED_USE;
2026 ipa_set_controlled_uses (info, i, controlled_uses);
2027 }
2028
2029 func = DECL_STRUCT_FUNCTION (decl);
2030 FOR_EACH_BB_FN (bb, func)
2031 {
2032 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2033 {
2034 gimple stmt = gsi_stmt (gsi);
2035
2036 if (is_gimple_debug (stmt))
2037 continue;
2038
2039 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2040 walk_stmt_load_store_addr_ops (stmt, info,
2041 visit_ref_for_mod_analysis,
2042 visit_ref_for_mod_analysis,
2043 visit_ref_for_mod_analysis);
2044 }
2045 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2046 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2047 visit_ref_for_mod_analysis,
2048 visit_ref_for_mod_analysis,
2049 visit_ref_for_mod_analysis);
2050 }
2051 }
2052
2053 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2054
2055 static void
2056 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2057 {
2058 int i;
2059
2060 for (i = 0; i < param_count; i++)
2061 {
2062 if (parms_ainfo[i].parm_visited_statements)
2063 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2064 if (parms_ainfo[i].pt_visited_statements)
2065 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2066 }
2067 }
2068
2069 /* Initialize the array describing properties of formal parameters
2070 of NODE, analyze their uses and compute jump functions associated
2071 with actual arguments of calls from within NODE. */
2072
2073 void
2074 ipa_analyze_node (struct cgraph_node *node)
2075 {
2076 struct ipa_node_params *info;
2077 struct param_analysis_info *parms_ainfo;
2078 int param_count;
2079
2080 ipa_check_create_node_params ();
2081 ipa_check_create_edge_args ();
2082 info = IPA_NODE_REF (node);
2083 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
2084 ipa_initialize_node_params (node);
2085
2086 param_count = ipa_get_param_count (info);
2087 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2088 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2089
2090 ipa_analyze_params_uses (node, parms_ainfo);
2091 ipa_compute_jump_functions (node, parms_ainfo);
2092
2093 free_parms_ainfo (parms_ainfo, param_count);
2094 pop_cfun ();
2095 }
2096
2097 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2098 attempt a type-based devirtualization. If successful, return the
2099 target function declaration, otherwise return NULL. */
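/* A hypothetical sketch of a call this can handle: when the type of the
   object is locally known, e.g.

     class A { public: virtual int f (); };

     int g (void)
     {
       A a;
       return a.f ();   // OBJ_TYPE_REF on an object of known type A
     }

   the known-type jump function yields A's BINFO and the OBJ_TYPE_REF
   token then selects A::f from the virtual table.  */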
2100
2101 tree
2102 ipa_intraprocedural_devirtualization (gimple call)
2103 {
2104 tree binfo, token, fndecl;
2105 struct ipa_jump_func jfunc;
2106 tree otr = gimple_call_fn (call);
2107
2108 jfunc.type = IPA_JF_UNKNOWN;
2109 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2110 call);
2111 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2112 return NULL_TREE;
2113 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2114 if (!binfo)
2115 return NULL_TREE;
2116 token = OBJ_TYPE_REF_TOKEN (otr);
2117 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2118 binfo);
2119 return fndecl;
2120 }
2121
2122 /* Update the jump function DST when the call graph edge corresponding to SRC
2123 is being inlined, knowing that DST is of type ancestor and SRC of known
2124 type. */
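/* A worked example under assumed numbers: if SRC describes a known object
   of base type B at offset 4 and DST is an ancestor jump function with
   offset 8 and type A, the result is a known-type jump function for an
   object of base type B at offset 4 + 8 = 12 with component type A.  */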
2125
2126 static void
2127 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2128 struct ipa_jump_func *dst)
2129 {
2130 HOST_WIDE_INT combined_offset;
2131 tree combined_type;
2132
2133 combined_offset = ipa_get_jf_known_type_offset (src)
2134 + ipa_get_jf_ancestor_offset (dst);
2135 combined_type = ipa_get_jf_ancestor_type (dst);
2136
2137 ipa_set_jf_known_type (dst, combined_offset,
2138 ipa_get_jf_known_type_base_type (src),
2139 combined_type);
2140 }
2141
2142 /* Update the jump functions associated with call graph edge E when the call
2143 graph edge CS is being inlined, assuming that E->caller is already (possibly
2144 indirectly) inlined into CS->callee and that E has not been inlined. */
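/* As a hypothetical illustration of the composition done below: if E
   describes an argument as the ancestor at offset 8 of formal 2 of the
   function being inlined, and CS passes one of its caller's formals
   through unchanged as argument 2, the updated function becomes the
   ancestor at offset 8 of that formal.  If CS instead passes an ancestor
   at offset 16, the offsets add up to 24.  */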
2145
2146 static void
2147 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2148 struct cgraph_edge *e)
2149 {
2150 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2151 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2152 int count = ipa_get_cs_argument_count (args);
2153 int i;
2154
2155 for (i = 0; i < count; i++)
2156 {
2157 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2158
2159 if (dst->type == IPA_JF_ANCESTOR)
2160 {
2161 struct ipa_jump_func *src;
2162 int dst_fid = dst->value.ancestor.formal_id;
2163
2164 /* Variable number of arguments can cause havoc if we try to access
2165 one that does not exist in the inlined edge. So make sure we
2166 don't. */
2167 if (dst_fid >= ipa_get_cs_argument_count (top))
2168 {
2169 dst->type = IPA_JF_UNKNOWN;
2170 continue;
2171 }
2172
2173 src = ipa_get_ith_jump_func (top, dst_fid);
2174
2175 if (src->agg.items
2176 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2177 {
2178 struct ipa_agg_jf_item *item;
2179 int j;
2180
2181 /* Currently we do not produce clobber aggregate jump functions;
2182 replace this with merging when we do. */
2183 gcc_assert (!dst->agg.items);
2184
2185 dst->agg.items = vec_safe_copy (src->agg.items);
2186 dst->agg.by_ref = src->agg.by_ref;
2187 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2188 item->offset -= dst->value.ancestor.offset;
2189 }
2190
2191 if (src->type == IPA_JF_KNOWN_TYPE)
2192 combine_known_type_and_ancestor_jfs (src, dst);
2193 else if (src->type == IPA_JF_PASS_THROUGH
2194 && src->value.pass_through.operation == NOP_EXPR)
2195 {
2196 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2197 dst->value.ancestor.agg_preserved &=
2198 src->value.pass_through.agg_preserved;
2199 }
2200 else if (src->type == IPA_JF_ANCESTOR)
2201 {
2202 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2203 dst->value.ancestor.offset += src->value.ancestor.offset;
2204 dst->value.ancestor.agg_preserved &=
2205 src->value.ancestor.agg_preserved;
2206 }
2207 else
2208 dst->type = IPA_JF_UNKNOWN;
2209 }
2210 else if (dst->type == IPA_JF_PASS_THROUGH)
2211 {
2212 struct ipa_jump_func *src;
2213 /* We must check the range because of calls with a variable number of
2214 arguments, and we cannot combine jump functions with operations. */
2215 if (dst->value.pass_through.operation == NOP_EXPR
2216 && (dst->value.pass_through.formal_id
2217 < ipa_get_cs_argument_count (top)))
2218 {
2219 bool agg_p;
2220 int dst_fid = dst->value.pass_through.formal_id;
2221 src = ipa_get_ith_jump_func (top, dst_fid);
2222 agg_p = dst->value.pass_through.agg_preserved;
2223
2224 dst->type = src->type;
2225 dst->value = src->value;
2226
2227 if (src->agg.items
2228 && (agg_p || !src->agg.by_ref))
2229 {
2230 /* Currently we do not produce clobber aggregate jump
2231 functions; replace this with merging when we do. */
2232 gcc_assert (!dst->agg.items);
2233
2234 dst->agg.by_ref = src->agg.by_ref;
2235 dst->agg.items = vec_safe_copy (src->agg.items);
2236 }
2237
2238 if (!agg_p)
2239 {
2240 if (dst->type == IPA_JF_PASS_THROUGH)
2241 dst->value.pass_through.agg_preserved = false;
2242 else if (dst->type == IPA_JF_ANCESTOR)
2243 dst->value.ancestor.agg_preserved = false;
2244 }
2245 }
2246 else
2247 dst->type = IPA_JF_UNKNOWN;
2248 }
2249 }
2250 }
2251
2252 /* If TARGET is an addr_expr of a function declaration, make it the destination
2253 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2254
2255 struct cgraph_edge *
2256 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2257 {
2258 struct cgraph_node *callee;
2259 struct inline_edge_summary *es = inline_edge_summary (ie);
2260 bool unreachable = false;
2261
2262 if (TREE_CODE (target) == ADDR_EXPR)
2263 target = TREE_OPERAND (target, 0);
2264 if (TREE_CODE (target) != FUNCTION_DECL)
2265 {
2266 target = canonicalize_constructor_val (target, NULL);
2267 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2268 {
2269 if (ie->indirect_info->member_ptr)
2270 /* Member pointer call that goes through a VMT lookup. */
2271 return NULL;
2272
2273 if (dump_file)
2274 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2275 " in %s/%i, making it unreachable.\n",
2276 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2277 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2278 callee = cgraph_get_create_node (target);
2279 unreachable = true;
2280 }
2281 else
2282 callee = cgraph_get_node (target);
2283 }
2284 else
2285 callee = cgraph_get_node (target);
2286
2287 /* Because may-edges are not explicitly represented and the vtable may be
2288 external, we may create the first reference to the object in the unit. */
2289 if (!callee || callee->global.inlined_to)
2290 {
2291
2292 /* We had better make sure we can refer to it.
2293 In the case of static functions we are out of luck, since we already
2294 removed its body. In the case of public functions we may or may
2295 not introduce the reference. */
2296 if (!canonicalize_constructor_val (target, NULL)
2297 || !TREE_PUBLIC (target))
2298 {
2299 if (dump_file)
2300 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2301 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2302 xstrdup (cgraph_node_name (ie->caller)),
2303 ie->caller->symbol.order,
2304 xstrdup (cgraph_node_name (ie->callee)),
2305 ie->callee->symbol.order);
2306 return NULL;
2307 }
2308 callee = cgraph_get_create_real_symbol_node (target);
2309 }
2310 ipa_check_create_node_params ();
2311
2312 /* We cannot make edges to inline clones. It is a bug if someone removed
2313 the cgraph node too early. */
2314 gcc_assert (!callee->global.inlined_to);
2315
2316 if (dump_file && !unreachable)
2317 {
2318 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2319 "(%s/%i -> %s/%i), for stmt ",
2320 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2321 xstrdup (cgraph_node_name (ie->caller)),
2322 ie->caller->symbol.order,
2323 xstrdup (cgraph_node_name (callee)),
2324 callee->symbol.order);
2325 if (ie->call_stmt)
2326 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2327 else
2328 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2329 }
2330 ie = cgraph_make_edge_direct (ie, callee);
2331 es = inline_edge_summary (ie);
2332 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2333 - eni_size_weights.call_cost);
2334 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2335 - eni_time_weights.call_cost);
2336
2337 return ie;
2338 }
2339
2340 /* Retrieve a value from the aggregate jump function AGG for the given OFFSET,
2341 or return NULL if there is none. BY_REF specifies whether the value has to
2342 be passed by reference or by value. */
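/* For example (with assumed contents): if AGG describes a by-value
   aggregate with a single item {offset 32, value 7}, then a query for
   offset 32 with BY_REF false returns 7, whereas the same query with
   BY_REF true returns NULL because the passing conventions differ.  */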
2343
2344 tree
2345 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2346 HOST_WIDE_INT offset, bool by_ref)
2347 {
2348 struct ipa_agg_jf_item *item;
2349 int i;
2350
2351 if (by_ref != agg->by_ref)
2352 return NULL;
2353
2354 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2355 if (item->offset == offset)
2356 {
2357 /* Currently we do not have clobber values; return NULL for them once
2358 we do. */
2359 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2360 return item->value;
2361 }
2362 return NULL;
2363 }
2364
2365 /* Remove a reference to SYMBOL from the list of references of a node given by
2366 reference description RDESC. */
2367
2368 static void
2369 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2370 {
2371 struct ipa_ref *to_del;
2372 struct cgraph_edge *origin;
2373
2374 origin = rdesc->cs;
2375 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2376 origin->call_stmt, origin->lto_stmt_uid);
2377 gcc_assert (to_del);
2378 ipa_remove_reference (to_del);
2379 if (dump_file)
2380 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2381 xstrdup (cgraph_node_name (origin->caller)),
2382 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2383 }
2384
2385 /* If JFUNC has a reference description with refcount different from
2386 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2387 NULL. JFUNC must be a constant jump function. */
2388
2389 static struct ipa_cst_ref_desc *
2390 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2391 {
2392 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2393 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2394 return rdesc;
2395 else
2396 return NULL;
2397 }
2398
2399 /* Try to find a destination for indirect edge IE that corresponds to a simple
2400 call or a call of a member function pointer and where the destination is a
2401 pointer formal parameter described by jump function JFUNC. If it can be
2402 determined, return the newly direct edge, otherwise return NULL.
2403 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2404
2405 static struct cgraph_edge *
2406 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2407 struct ipa_jump_func *jfunc,
2408 struct ipa_node_params *new_root_info)
2409 {
2410 struct cgraph_edge *cs;
2411 tree target;
2412 bool agg_contents = ie->indirect_info->agg_contents;
2413 bool speculative = ie->speculative;
2414 struct ipa_cst_ref_desc *rdesc;
2415
2416 if (ie->indirect_info->agg_contents)
2417 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2418 ie->indirect_info->offset,
2419 ie->indirect_info->by_ref);
2420 else
2421 target = ipa_value_from_jfunc (new_root_info, jfunc);
2422 if (!target)
2423 return NULL;
2424 cs = ipa_make_edge_direct_to_target (ie, target);
2425
2426 /* FIXME: speculative edges can be handled. */
2427 if (cs && !agg_contents && !speculative
2428 && jfunc->type == IPA_JF_CONST
2429 && (rdesc = jfunc_rdesc_usable (jfunc))
2430 && --rdesc->refcount == 0)
2431 remove_described_reference ((symtab_node) cs->callee, rdesc);
2432
2433 return cs;
2434 }
2435
2436 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2437 call based on a formal parameter which is described by jump function JFUNC
2438 and if it can be determined, make it direct and return the direct edge.
2439 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2440 are relative to. */
2441
2442 static struct cgraph_edge *
2443 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2444 struct ipa_jump_func *jfunc,
2445 struct ipa_node_params *new_root_info)
2446 {
2447 tree binfo, target;
2448
2449 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2450
2451 if (!binfo)
2452 return NULL;
2453
2454 if (TREE_CODE (binfo) != TREE_BINFO)
2455 {
2456 binfo = gimple_extract_devirt_binfo_from_cst
2457 (binfo, ie->indirect_info->otr_type);
2458 if (!binfo)
2459 return NULL;
2460 }
2461
2462 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2463 ie->indirect_info->otr_type);
2464 if (binfo)
2465 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2466 binfo);
2467 else
2468 return NULL;
2469
2470 if (target)
2471 return ipa_make_edge_direct_to_target (ie, target);
2472 else
2473 return NULL;
2474 }
2475
2476 /* Update the indirect call notes associated with NODE when CS is being
2477 inlined, assuming NODE is (potentially indirectly) inlined into CS->callee.
2478 Moreover, if the callee is discovered to be constant, create a new cgraph
2479 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2480 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2481
2482 static bool
2483 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2484 struct cgraph_node *node,
2485 vec<cgraph_edge_p> *new_edges)
2486 {
2487 struct ipa_edge_args *top;
2488 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2489 struct ipa_node_params *new_root_info;
2490 bool res = false;
2491
2492 ipa_check_create_edge_args ();
2493 top = IPA_EDGE_REF (cs);
2494 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2495 ? cs->caller->global.inlined_to
2496 : cs->caller);
2497
2498 for (ie = node->indirect_calls; ie; ie = next_ie)
2499 {
2500 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2501 struct ipa_jump_func *jfunc;
2502 int param_index;
2503
2504 next_ie = ie->next_callee;
2505
2506 if (ici->param_index == -1)
2507 continue;
2508
2509 /* We must check the range because of calls with a variable number of arguments: */
2510 if (ici->param_index >= ipa_get_cs_argument_count (top))
2511 {
2512 ici->param_index = -1;
2513 continue;
2514 }
2515
2516 param_index = ici->param_index;
2517 jfunc = ipa_get_ith_jump_func (top, param_index);
2518
2519 if (!flag_indirect_inlining)
2520 new_direct_edge = NULL;
2521 else if (ici->polymorphic)
2522 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2523 new_root_info);
2524 else
2525 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2526 new_root_info);
2527 /* If speculation was removed, then we need to do nothing. */
2528 if (new_direct_edge && new_direct_edge != ie)
2529 {
2530 new_direct_edge->indirect_inlining_edge = 1;
2531 top = IPA_EDGE_REF (cs);
2532 res = true;
2533 }
2534 else if (new_direct_edge)
2535 {
2536 new_direct_edge->indirect_inlining_edge = 1;
2537 if (new_direct_edge->call_stmt)
2538 new_direct_edge->call_stmt_cannot_inline_p
2539 = !gimple_check_call_matching_types (
2540 new_direct_edge->call_stmt,
2541 new_direct_edge->callee->symbol.decl, false);
2542 if (new_edges)
2543 {
2544 new_edges->safe_push (new_direct_edge);
2545 res = true;
2546 }
2547 top = IPA_EDGE_REF (cs);
2548 }
2549 else if (jfunc->type == IPA_JF_PASS_THROUGH
2550 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2551 {
2552 if (ici->agg_contents
2553 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2554 ici->param_index = -1;
2555 else
2556 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2557 }
2558 else if (jfunc->type == IPA_JF_ANCESTOR)
2559 {
2560 if (ici->agg_contents
2561 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2562 ici->param_index = -1;
2563 else
2564 {
2565 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2566 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2567 }
2568 }
2569 else
2570 /* Either we can find a destination for this edge now or never. */
2571 ici->param_index = -1;
2572 }
2573
2574 return res;
2575 }
2576
2577 /* Recursively traverse subtree of NODE (including node) made of inlined
2578 cgraph_edges when CS has been inlined and invoke
2579 update_indirect_edges_after_inlining on all nodes and
2580 update_jump_functions_after_inlining on all non-inlined edges that lead out
2581 of this subtree. Newly discovered indirect edges will be added to
2582 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2583 created. */
2584
2585 static bool
2586 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2587 struct cgraph_node *node,
2588 vec<cgraph_edge_p> *new_edges)
2589 {
2590 struct cgraph_edge *e;
2591 bool res;
2592
2593 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2594
2595 for (e = node->callees; e; e = e->next_callee)
2596 if (!e->inline_failed)
2597 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2598 else
2599 update_jump_functions_after_inlining (cs, e);
2600 for (e = node->indirect_calls; e; e = e->next_callee)
2601 update_jump_functions_after_inlining (cs, e);
2602
2603 return res;
2604 }
2605
2606 /* Combine two controlled uses counts as done during inlining. */
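/* For example, with assumed counts: if the caller has C = 3 described
   uses of a value and passes it to a callee with D = 2 described uses of
   the corresponding formal, then after inlining the use constituted by
   the call itself disappears, leaving 3 + 2 - 1 = 4 described uses.  */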
2607
2608 static int
2609 combine_controlled_uses_counters (int c, int d)
2610 {
2611 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2612 return IPA_UNDESCRIBED_USE;
2613 else
2614 return c + d - 1;
2615 }
2616
2617 /* Propagate the number of controlled users from CS->callee to the new root
2618 of the tree of inlined nodes. */
2619
2620 static void
2621 propagate_controlled_uses (struct cgraph_edge *cs)
2622 {
2623 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2624 struct cgraph_node *new_root = cs->caller->global.inlined_to
2625 ? cs->caller->global.inlined_to : cs->caller;
2626 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2627 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2628 int count, i;
2629
2630 count = MIN (ipa_get_cs_argument_count (args),
2631 ipa_get_param_count (old_root_info));
2632 for (i = 0; i < count; i++)
2633 {
2634 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2635 struct ipa_cst_ref_desc *rdesc;
2636
2637 if (jf->type == IPA_JF_PASS_THROUGH)
2638 {
2639 int src_idx, c, d;
2640 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2641 c = ipa_get_controlled_uses (new_root_info, src_idx);
2642 d = ipa_get_controlled_uses (old_root_info, i);
2643
2644 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2645 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2646 c = combine_controlled_uses_counters (c, d);
2647 ipa_set_controlled_uses (new_root_info, src_idx, c);
2648 if (c == 0 && new_root_info->ipcp_orig_node)
2649 {
2650 struct cgraph_node *n;
2651 struct ipa_ref *ref;
2652 tree t = new_root_info->known_vals[src_idx];
2653
2654 if (t && TREE_CODE (t) == ADDR_EXPR
2655 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2656 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2657 && (ref = ipa_find_reference ((symtab_node) new_root,
2658 (symtab_node) n, NULL, 0)))
2659 {
2660 if (dump_file)
2661 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2662 "reference from %s/%i to %s/%i.\n",
2663 xstrdup (cgraph_node_name (new_root)),
2664 new_root->symbol.order,
2665 xstrdup (cgraph_node_name (n)), n->symbol.order);
2666 ipa_remove_reference (ref);
2667 }
2668 }
2669 }
2670 else if (jf->type == IPA_JF_CONST
2671 && (rdesc = jfunc_rdesc_usable (jf)))
2672 {
2673 int d = ipa_get_controlled_uses (old_root_info, i);
2674 int c = rdesc->refcount;
2675 rdesc->refcount = combine_controlled_uses_counters (c, d);
2676 if (rdesc->refcount == 0)
2677 {
2678 tree cst = ipa_get_jf_constant (jf);
2679 struct cgraph_node *n;
2680 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2681 && TREE_CODE (TREE_OPERAND (cst, 0))
2682 == FUNCTION_DECL);
2683 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2684 if (n)
2685 {
2686 struct cgraph_node *clone;
2687 remove_described_reference ((symtab_node) n, rdesc);
2688
2689 clone = cs->caller;
2690 while (clone->global.inlined_to
2691 && clone != rdesc->cs->caller
2692 && IPA_NODE_REF (clone)->ipcp_orig_node)
2693 {
2694 struct ipa_ref *ref;
2695 ref = ipa_find_reference ((symtab_node) clone,
2696 (symtab_node) n, NULL, 0);
2697 if (ref)
2698 {
2699 if (dump_file)
2700 fprintf (dump_file, "ipa-prop: Removing "
2701 "cloning-created reference "
2702 "from %s/%i to %s/%i.\n",
2703 xstrdup (cgraph_node_name (clone)),
2704 clone->symbol.order,
2705 xstrdup (cgraph_node_name (n)),
2706 n->symbol.order);
2707 ipa_remove_reference (ref);
2708 }
2709 clone = clone->callers->caller;
2710 }
2711 }
2712 }
2713 }
2714 }
2715
2716 for (i = ipa_get_param_count (old_root_info);
2717 i < ipa_get_cs_argument_count (args);
2718 i++)
2719 {
2720 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2721
2722 if (jf->type == IPA_JF_CONST)
2723 {
2724 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2725 if (rdesc)
2726 rdesc->refcount = IPA_UNDESCRIBED_USE;
2727 }
2728 else if (jf->type == IPA_JF_PASS_THROUGH)
2729 ipa_set_controlled_uses (new_root_info,
2730 jf->value.pass_through.formal_id,
2731 IPA_UNDESCRIBED_USE);
2732 }
2733 }
2734
2735 /* Update jump functions and call note functions on inlining the call site CS.
2736 CS is expected to lead to a node already cloned by
2737 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2738 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2739 created. */
2740
2741 bool
2742 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2743 vec<cgraph_edge_p> *new_edges)
2744 {
2745 bool changed;
2746 /* Do nothing if the preparation phase has not been carried out yet
2747 (i.e. during early inlining). */
2748 if (!ipa_node_params_vector.exists ())
2749 return false;
2750 gcc_assert (ipa_edge_args_vector);
2751
2752 propagate_controlled_uses (cs);
2753 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2754
2755 return changed;
2756 }
2757
2758 /* Frees all dynamically allocated structures that the argument info points
2759 to. */
2760
2761 void
2762 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2763 {
2764 vec_free (args->jump_functions);
2765 memset (args, 0, sizeof (*args));
2766 }
2767
2768 /* Free all ipa_edge structures. */
2769
2770 void
2771 ipa_free_all_edge_args (void)
2772 {
2773 int i;
2774 struct ipa_edge_args *args;
2775
2776 if (!ipa_edge_args_vector)
2777 return;
2778
2779 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2780 ipa_free_edge_args_substructures (args);
2781
2782 vec_free (ipa_edge_args_vector);
2783 }
2784
2785 /* Frees all dynamically allocated structures that the param info points
2786 to. */
2787
2788 void
2789 ipa_free_node_params_substructures (struct ipa_node_params *info)
2790 {
2791 info->descriptors.release ();
2792 free (info->lattices);
2793 /* Lattice values and their sources are deallocated with their allocation
2794 pool. */
2795 info->known_vals.release ();
2796 memset (info, 0, sizeof (*info));
2797 }
2798
2799 /* Free all ipa_node_params structures. */
2800
2801 void
2802 ipa_free_all_node_params (void)
2803 {
2804 int i;
2805 struct ipa_node_params *info;
2806
2807 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2808 ipa_free_node_params_substructures (info);
2809
2810 ipa_node_params_vector.release ();
2811 }
2812
2813 /* Set the aggregate replacements of NODE to be AGGVALS. */
2814
2815 void
2816 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2817 struct ipa_agg_replacement_value *aggvals)
2818 {
2819 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2820 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2821
2822 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2823 }
2824
2825 /* Hook that is called by cgraph.c when an edge is removed. */
2826
2827 static void
2828 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2829 {
2830 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
2831 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
2832 return;
2833 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2834 }
2835
2836 /* Hook that is called by cgraph.c when a node is removed. */
2837
2838 static void
2839 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2840 {
2841 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
2842 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2843 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2844 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2845 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
2846 }
2847
2848 /* Hook that is called by cgraph.c when an edge is duplicated. */
2849
2850 static void
2851 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2852 __attribute__((unused)) void *data)
2853 {
2854 struct ipa_edge_args *old_args, *new_args;
2855 unsigned int i;
2856
2857 ipa_check_create_edge_args ();
2858
2859 old_args = IPA_EDGE_REF (src);
2860 new_args = IPA_EDGE_REF (dst);
2861
2862 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
2863
2864 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2865 {
2866 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
2867 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
2868
2869 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
2870
2871 if (src_jf->type == IPA_JF_CONST)
2872 {
2873 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
2874
2875 if (!src_rdesc)
2876 dst_jf->value.constant.rdesc = NULL;
2877 else if (src_rdesc->cs == src)
2878 {
2879 struct ipa_cst_ref_desc *dst_rdesc;
2880 gcc_checking_assert (ipa_refdesc_pool);
2881 dst_rdesc
2882 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
2883 dst_rdesc->cs = dst;
2884 dst_rdesc->refcount = src_rdesc->refcount;
2885 if (dst->caller->global.inlined_to)
2886 {
2887 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
2888 src_rdesc->next_duplicate = dst_rdesc;
2889 }
2890 dst_jf->value.constant.rdesc = dst_rdesc;
2891 }
2892 else
2893 {
2894 struct ipa_cst_ref_desc *dst_rdesc;
2895 /* This can happen during inlining, when a JFUNC can refer to a
2896 reference taken in a function up in the tree of inline clones.
2897 We need to find the duplicate that refers to our tree of
2898 inline clones. */
2899
2900 gcc_assert (dst->caller->global.inlined_to);
2901 for (dst_rdesc = src_rdesc->next_duplicate;
2902 dst_rdesc;
2903 dst_rdesc = dst_rdesc->next_duplicate)
2904 if (dst_rdesc->cs->caller->global.inlined_to
2905 == dst->caller->global.inlined_to)
2906 break;
2907 gcc_assert (dst_rdesc);
2908 dst_jf->value.constant.rdesc = dst_rdesc;
2909 }
2910 }
2911 }
2912 }
2913
2914 /* Hook that is called by cgraph.c when a node is duplicated. */
2915
2916 static void
2917 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2918 ATTRIBUTE_UNUSED void *data)
2919 {
2920 struct ipa_node_params *old_info, *new_info;
2921 struct ipa_agg_replacement_value *old_av, *new_av;
2922
2923 ipa_check_create_node_params ();
2924 old_info = IPA_NODE_REF (src);
2925 new_info = IPA_NODE_REF (dst);
2926
2927 new_info->descriptors = old_info->descriptors.copy ();
2928 new_info->lattices = NULL;
2929 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2930
2931 new_info->uses_analysis_done = old_info->uses_analysis_done;
2932 new_info->node_enqueued = old_info->node_enqueued;
2933
2934 old_av = ipa_get_agg_replacements_for_node (src);
2935 if (!old_av)
2936 return;
2937
2938 new_av = NULL;
2939 while (old_av)
2940 {
2941 struct ipa_agg_replacement_value *v;
2942
2943 v = ggc_alloc_ipa_agg_replacement_value ();
2944 memcpy (v, old_av, sizeof (*v));
2945 v->next = new_av;
2946 new_av = v;
2947 old_av = old_av->next;
2948 }
2949 ipa_set_node_agg_value_chain (dst, new_av);
2950 }
2951
2952
2953 /* Analyze a function newly added to the callgraph. */
2954
2955 static void
2956 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2957 {
2958 ipa_analyze_node (node);
2959 }
2960
2961 /* Register our cgraph hooks if they are not already there. */
2962
2963 void
2964 ipa_register_cgraph_hooks (void)
2965 {
2966 if (!edge_removal_hook_holder)
2967 edge_removal_hook_holder =
2968 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2969 if (!node_removal_hook_holder)
2970 node_removal_hook_holder =
2971 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2972 if (!edge_duplication_hook_holder)
2973 edge_duplication_hook_holder =
2974 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2975 if (!node_duplication_hook_holder)
2976 node_duplication_hook_holder =
2977 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2978 function_insertion_hook_holder =
2979 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2980 }
2981
2982 /* Unregister all our cgraph hooks. */
2983
2984 static void
2985 ipa_unregister_cgraph_hooks (void)
2986 {
2987 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2988 edge_removal_hook_holder = NULL;
2989 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2990 node_removal_hook_holder = NULL;
2991 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2992 edge_duplication_hook_holder = NULL;
2993 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2994 node_duplication_hook_holder = NULL;
2995 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2996 function_insertion_hook_holder = NULL;
2997 }
2998
2999 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3000 longer needed after ipa-cp. */
3001
3002 void
3003 ipa_free_all_structures_after_ipa_cp (void)
3004 {
3005 if (!optimize)
3006 {
3007 ipa_free_all_edge_args ();
3008 ipa_free_all_node_params ();
3009 free_alloc_pool (ipcp_sources_pool);
3010 free_alloc_pool (ipcp_values_pool);
3011 free_alloc_pool (ipcp_agg_lattice_pool);
3012 ipa_unregister_cgraph_hooks ();
3013 if (ipa_refdesc_pool)
3014 free_alloc_pool (ipa_refdesc_pool);
3015 }
3016 }
3017
3018 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3019 longer needed after indirect inlining. */
3020
3021 void
3022 ipa_free_all_structures_after_iinln (void)
3023 {
3024 ipa_free_all_edge_args ();
3025 ipa_free_all_node_params ();
3026 ipa_unregister_cgraph_hooks ();
3027 if (ipcp_sources_pool)
3028 free_alloc_pool (ipcp_sources_pool);
3029 if (ipcp_values_pool)
3030 free_alloc_pool (ipcp_values_pool);
3031 if (ipcp_agg_lattice_pool)
3032 free_alloc_pool (ipcp_agg_lattice_pool);
3033 if (ipa_refdesc_pool)
3034 free_alloc_pool (ipa_refdesc_pool);
3035 }
3036
3037 /* Print the ipa_tree_map data structures (parameter descriptors) of
3038 function NODE to F. */
3039
3040 void
3041 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3042 {
3043 int i, count;
3044 tree temp;
3045 struct ipa_node_params *info;
3046
3047 if (!node->symbol.definition)
3048 return;
3049 info = IPA_NODE_REF (node);
3050 fprintf (f, " function %s/%i parameter descriptors:\n",
3051 cgraph_node_name (node), node->symbol.order);
3052 count = ipa_get_param_count (info);
3053 for (i = 0; i < count; i++)
3054 {
3055 int c;
3056
3057 temp = ipa_get_param (info, i);
3058 if (TREE_CODE (temp) == PARM_DECL)
3059 fprintf (f, " param %d : %s", i,
3060 (DECL_NAME (temp)
3061 ? (*lang_hooks.decl_printable_name) (temp, 2)
3062 : "(unnamed)"));
3063 if (ipa_is_param_used (info, i))
3064 fprintf (f, " used");
3065 c = ipa_get_controlled_uses (info, i);
3066 if (c == IPA_UNDESCRIBED_USE)
3067 fprintf (f, " undescribed_use");
3068 else
3069 fprintf (f, " controlled_uses=%i", c);
3070 fprintf (f, "\n");
3071 }
3072 }
3073
3074 /* Print ipa_tree_map data structures of all functions in the
3075 callgraph to F. */
3076
3077 void
3078 ipa_print_all_params (FILE * f)
3079 {
3080 struct cgraph_node *node;
3081
3082 fprintf (f, "\nFunction parameters:\n");
3083 FOR_EACH_FUNCTION (node)
3084 ipa_print_node_params (f, node);
3085 }
3086
3087 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3088
3089 vec<tree>
3090 ipa_get_vector_of_formal_parms (tree fndecl)
3091 {
3092 vec<tree> args;
3093 int count;
3094 tree parm;
3095
3096 gcc_assert (!flag_wpa);
3097 count = count_formal_params (fndecl);
3098 args.create (count);
3099 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3100 args.quick_push (parm);
3101
3102 return args;
3103 }
3104
3105 /* Return a heap-allocated vector containing the types of formal parameters of
3106 function type FNTYPE. */
3107
3108 static inline vec<tree>
3109 get_vector_of_formal_parm_types (tree fntype)
3110 {
3111 vec<tree> types;
3112 int count = 0;
3113 tree t;
3114
3115 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3116 count++;
3117
3118 types.create (count);
3119 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3120 types.quick_push (TREE_VALUE (t));
3121
3122 return types;
3123 }
3124
3125 /* Modify the function declaration FNDECL and its type according to the plan in
3126 ADJUSTMENTS. It also sets the base fields of individual adjustment
3127 structures to reflect the actual parameters being modified, which are
3128 determined by the base_index field. */
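/* A hypothetical example: given

     int f (int a, struct S *s);

   an ADJUSTMENTS vector with a copy_param entry for base_index 0 and a
   non-copy, non-remove entry for base_index 1 with type int (a scalar
   replacing the pointed-to aggregate) would rewrite the declaration to
   roughly

     int f (int a, int SYNTH);

   assuming the default "SYNTH" prefix for the synthesized parameter.  */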
3129
3130 void
3131 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3132 const char *synth_parm_prefix)
3133 {
3134 vec<tree> oparms, otypes;
3135 tree orig_type, new_type = NULL;
3136 tree old_arg_types, t, new_arg_types = NULL;
3137 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3138 int i, len = adjustments.length ();
3139 tree new_reversed = NULL;
3140 bool care_for_types, last_parm_void;
3141
3142 if (!synth_parm_prefix)
3143 synth_parm_prefix = "SYNTH";
3144
3145 oparms = ipa_get_vector_of_formal_parms (fndecl);
3146 orig_type = TREE_TYPE (fndecl);
3147 old_arg_types = TYPE_ARG_TYPES (orig_type);
3148
3149 /* The following test is an ugly hack; some functions simply don't have any
3150 arguments in their type. This is probably a bug but well... */
3151 care_for_types = (old_arg_types != NULL_TREE);
3152 if (care_for_types)
3153 {
3154 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3155 == void_type_node);
3156 otypes = get_vector_of_formal_parm_types (orig_type);
3157 if (last_parm_void)
3158 gcc_assert (oparms.length () + 1 == otypes.length ());
3159 else
3160 gcc_assert (oparms.length () == otypes.length ());
3161 }
3162 else
3163 {
3164 last_parm_void = false;
3165 otypes.create (0);
3166 }
3167
3168 for (i = 0; i < len; i++)
3169 {
3170 struct ipa_parm_adjustment *adj;
3171 gcc_assert (link);
3172
3173 adj = &adjustments[i];
3174 parm = oparms[adj->base_index];
3175 adj->base = parm;
3176
3177 if (adj->copy_param)
3178 {
3179 if (care_for_types)
3180 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3181 new_arg_types);
3182 *link = parm;
3183 link = &DECL_CHAIN (parm);
3184 }
3185 else if (!adj->remove_param)
3186 {
3187 tree new_parm;
3188 tree ptype;
3189
3190 if (adj->by_ref)
3191 ptype = build_pointer_type (adj->type);
3192 else
3193 ptype = adj->type;
3194
3195 if (care_for_types)
3196 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3197
3198 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3199 ptype);
3200 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3201
3202 DECL_ARTIFICIAL (new_parm) = 1;
3203 DECL_ARG_TYPE (new_parm) = ptype;
3204 DECL_CONTEXT (new_parm) = fndecl;
3205 TREE_USED (new_parm) = 1;
3206 DECL_IGNORED_P (new_parm) = 1;
3207 layout_decl (new_parm, 0);
3208
3209 adj->base = parm;
3210 adj->reduction = new_parm;
3211
3212 *link = new_parm;
3213
3214 link = &DECL_CHAIN (new_parm);
3215 }
3216 }
3217
3218 *link = NULL_TREE;
3219
3220 if (care_for_types)
3221 {
3222 new_reversed = nreverse (new_arg_types);
3223 if (last_parm_void)
3224 {
3225 if (new_reversed)
3226 TREE_CHAIN (new_arg_types) = void_list_node;
3227 else
3228 new_reversed = void_list_node;
3229 }
3230 }
3231
3232 /* Use copy_node to preserve as much as possible from the original type
3233 (debug info, attribute lists etc.). The exception is METHOD_TYPEs,
3234 which must have a THIS argument; when we are asked to remove it, we
3235 need to build a new FUNCTION_TYPE instead. */
3237 if (TREE_CODE (orig_type) != METHOD_TYPE
3238 || (adjustments[0].copy_param
3239 && adjustments[0].base_index == 0))
3240 {
3241 new_type = build_distinct_type_copy (orig_type);
3242 TYPE_ARG_TYPES (new_type) = new_reversed;
3243 }
3244 else
3245 {
3246 new_type
3247 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3248 new_reversed));
3249 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3250 DECL_VINDEX (fndecl) = NULL_TREE;
3251 }
3252
3253 /* When signature changes, we need to clear builtin info. */
3254 if (DECL_BUILT_IN (fndecl))
3255 {
3256 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3257 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3258 }
3259
3260 /* This is a new type, not a copy of an old type. Need to reassociate
3261 variants. We can handle everything except the main variant lazily. */
3262 t = TYPE_MAIN_VARIANT (orig_type);
3263 if (orig_type != t)
3264 {
3265 TYPE_MAIN_VARIANT (new_type) = t;
3266 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3267 TYPE_NEXT_VARIANT (t) = new_type;
3268 }
3269 else
3270 {
3271 TYPE_MAIN_VARIANT (new_type) = new_type;
3272 TYPE_NEXT_VARIANT (new_type) = NULL;
3273 }
3274
3275 TREE_TYPE (fndecl) = new_type;
3276 DECL_VIRTUAL_P (fndecl) = 0;
3277 otypes.release ();
3278 oparms.release ();
3279 }
3280
3281 /* Modify the actual arguments of the function call statement STMT as
3282 indicated in ADJUSTMENTS. If this is a directly recursive call, CS must
3283 be NULL. Otherwise it must contain the corresponding call graph edge. */
3284
3285 void
3286 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3287 ipa_parm_adjustment_vec adjustments)
3288 {
3289 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3290 vec<tree> vargs;
3291 vec<tree, va_gc> **debug_args = NULL;
3292 gimple new_stmt;
3293 gimple_stmt_iterator gsi, prev_gsi;
3294 tree callee_decl;
3295 int i, len;
3296
3297 len = adjustments.length ();
3298 vargs.create (len);
3299 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
3300 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
3301
3302 gsi = gsi_for_stmt (stmt);
3303 prev_gsi = gsi;
3304 gsi_prev (&prev_gsi);
3305 for (i = 0; i < len; i++)
3306 {
3307 struct ipa_parm_adjustment *adj;
3308
3309 adj = &adjustments[i];
3310
3311 if (adj->copy_param)
3312 {
3313 tree arg = gimple_call_arg (stmt, adj->base_index);
3314
3315 vargs.quick_push (arg);
3316 }
3317 else if (!adj->remove_param)
3318 {
3319 tree expr, base, off;
3320 location_t loc;
3321 unsigned int deref_align;
3322 bool deref_base = false;
3323
3324 /* When we create a new parameter out of the value of the old one, we can
3325 do the following kinds of transformations:
3326
3327 - A scalar passed by reference is converted to a scalar passed by
3328 value. (adj->by_ref is false and the type of the original
3329 actual argument is a pointer to a scalar).
3330
3331 - A part of an aggregate is passed instead of the whole aggregate.
3332 The part can be passed either by value or by reference, this is
3333 determined by value of adj->by_ref. Moreover, the code below
3334 handles both situations when the original aggregate is passed by
3335 value (its type is not a pointer) and when it is passed by
3336 reference (it is a pointer to an aggregate).
3337
3338 When the new argument is passed by reference (adj->by_ref is true)
3339 it must be a part of an aggregate and therefore we form it by
3340 simply taking the address of a reference inside the original
3341 aggregate. */
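	     /* As a concrete sketch of the first kind of transformation
		(with a hypothetical offset): a call 'foo (&s)' whose callee
		now takes only the scalar at byte offset 4 of the aggregate
		is rewritten to pass the value 'MEM[(int *) &s + 4]' instead
		of the address of the whole of S.  */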
3342
3343 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3344 base = gimple_call_arg (stmt, adj->base_index);
3345 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3346 : EXPR_LOCATION (base);
3347
3348 if (TREE_CODE (base) != ADDR_EXPR
3349 && POINTER_TYPE_P (TREE_TYPE (base)))
3350 off = build_int_cst (adj->alias_ptr_type,
3351 adj->offset / BITS_PER_UNIT);
3352 else
3353 {
3354 HOST_WIDE_INT base_offset;
3355 tree prev_base;
3356 bool addrof;
3357
3358 if (TREE_CODE (base) == ADDR_EXPR)
3359 {
3360 base = TREE_OPERAND (base, 0);
3361 addrof = true;
3362 }
3363 else
3364 addrof = false;
3365 prev_base = base;
3366 base = get_addr_base_and_unit_offset (base, &base_offset);
3367 /* Aggregate arguments can have non-invariant addresses. */
3368 if (!base)
3369 {
3370 base = build_fold_addr_expr (prev_base);
3371 off = build_int_cst (adj->alias_ptr_type,
3372 adj->offset / BITS_PER_UNIT);
3373 }
3374 else if (TREE_CODE (base) == MEM_REF)
3375 {
3376 if (!addrof)
3377 {
3378 deref_base = true;
3379 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3380 }
3381 off = build_int_cst (adj->alias_ptr_type,
3382 base_offset
3383 + adj->offset / BITS_PER_UNIT);
3384 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3385 off);
3386 base = TREE_OPERAND (base, 0);
3387 }
3388 else
3389 {
3390 off = build_int_cst (adj->alias_ptr_type,
3391 base_offset
3392 + adj->offset / BITS_PER_UNIT);
3393 base = build_fold_addr_expr (base);
3394 }
3395 }
3396
3397 if (!adj->by_ref)
3398 {
3399 tree type = adj->type;
3400 unsigned int align;
3401 unsigned HOST_WIDE_INT misalign;
3402
3403 if (deref_base)
3404 {
3405 align = deref_align;
3406 misalign = 0;
3407 }
3408 else
3409 {
3410 get_pointer_alignment_1 (base, &align, &misalign);
3411 if (TYPE_ALIGN (type) > align)
3412 align = TYPE_ALIGN (type);
3413 }
3414 misalign += (tree_to_double_int (off)
3415 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3416 * BITS_PER_UNIT);
3417 misalign = misalign & (align - 1);
3418 if (misalign != 0)
3419 align = (misalign & -misalign);
3420 if (align < TYPE_ALIGN (type))
3421 type = build_aligned_type (type, align);
3422 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3423 }
3424 else
3425 {
3426 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3427 expr = build_fold_addr_expr (expr);
3428 }
3429
3430 expr = force_gimple_operand_gsi (&gsi, expr,
3431 adj->by_ref
3432 || is_gimple_reg_type (adj->type),
3433 NULL, true, GSI_SAME_STMT);
3434 vargs.quick_push (expr);
3435 }
3436 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3437 {
3438 unsigned int ix;
3439 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3440 gimple def_temp;
3441
3442 arg = gimple_call_arg (stmt, adj->base_index);
3443 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3444 {
3445 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3446 continue;
3447 arg = fold_convert_loc (gimple_location (stmt),
3448 TREE_TYPE (origin), arg);
3449 }
3450 if (debug_args == NULL)
3451 debug_args = decl_debug_args_insert (callee_decl);
3452 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3453 if (ddecl == origin)
3454 {
3455 ddecl = (**debug_args)[ix + 1];
3456 break;
3457 }
3458 if (ddecl == NULL)
3459 {
3460 ddecl = make_node (DEBUG_EXPR_DECL);
3461 DECL_ARTIFICIAL (ddecl) = 1;
3462 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3463 DECL_MODE (ddecl) = DECL_MODE (origin);
3464
3465 vec_safe_push (*debug_args, origin);
3466 vec_safe_push (*debug_args, ddecl);
3467 }
3468 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3469 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3470 }
3471 }
3472
3473 if (dump_file && (dump_flags & TDF_DETAILS))
3474 {
3475 fprintf (dump_file, "replacing stmt:");
3476 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3477 }
3478
3479 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3480 vargs.release ();
3481 if (gimple_call_lhs (stmt))
3482 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3483
3484 gimple_set_block (new_stmt, gimple_block (stmt));
3485 if (gimple_has_location (stmt))
3486 gimple_set_location (new_stmt, gimple_location (stmt));
3487 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3488 gimple_call_copy_flags (new_stmt, stmt);
3489
3490 if (dump_file && (dump_flags & TDF_DETAILS))
3491 {
3492 fprintf (dump_file, "with stmt:");
3493 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3494 fprintf (dump_file, "\n");
3495 }
3496 gsi_replace (&gsi, new_stmt, true);
3497 if (cs)
3498 cgraph_set_call_stmt (cs, new_stmt);
3499 do
3500 {
3501 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3502 gsi_prev (&gsi);
3503 }
3504 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3505 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3506
3507 update_ssa (TODO_update_ssa);
3508 free_dominance_info (CDI_DOMINATORS);
3509 }
3510
3511 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3512
3513 static bool
3514 index_in_adjustments_multiple_times_p (int base_index,
3515 ipa_parm_adjustment_vec adjustments)
3516 {
3517 int i, len = adjustments.length ();
3518 bool one = false;
3519
3520 for (i = 0; i < len; i++)
3521 {
3522 struct ipa_parm_adjustment *adj;
3523 adj = &adjustments[i];
3524
3525 if (adj->base_index == base_index)
3526 {
3527 if (one)
3528 return true;
3529 else
3530 one = true;
3531 }
3532 }
3533 return false;
3534 }
3535
3536
3537 /* Return adjustments that should have the same effect on function parameters
3538 and call arguments as if they were first changed according to adjustments in
3539 INNER and then by adjustments in OUTER. */
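/* A worked, hypothetical example: if INNER keeps parameter 0 and replaces
   parameter 1 with the scalar at offset 32 within it, and OUTER then
   replaces its (new) parameter 1 with the scalar at offset 8 within that,
   the combined vector maps the original parameter 1 to offset
   32 + 8 = 40.  */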
3540
3541 ipa_parm_adjustment_vec
3542 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3543 ipa_parm_adjustment_vec outer)
3544 {
3545 int i, outlen = outer.length ();
3546 int inlen = inner.length ();
3547 int removals = 0;
3548 ipa_parm_adjustment_vec adjustments, tmp;
3549
3550 tmp.create (inlen);
3551 for (i = 0; i < inlen; i++)
3552 {
3553 struct ipa_parm_adjustment *n;
3554 n = &inner[i];
3555
3556 if (n->remove_param)
3557 removals++;
3558 else
3559 tmp.quick_push (*n);
3560 }
3561
3562 adjustments.create (outlen + removals);
3563 for (i = 0; i < outlen; i++)
3564 {
3565 struct ipa_parm_adjustment r;
3566 struct ipa_parm_adjustment *out = &outer[i];
3567 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3568
3569 memset (&r, 0, sizeof (r));
3570 gcc_assert (!in->remove_param);
3571 if (out->remove_param)
3572 {
3573 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3574 {
3575 r.remove_param = true;
3576 adjustments.quick_push (r);
3577 }
3578 continue;
3579 }
3580
3581 r.base_index = in->base_index;
3582 r.type = out->type;
3583
3584 /* FIXME: Create nonlocal value too. */
3585
3586 if (in->copy_param && out->copy_param)
3587 r.copy_param = true;
3588 else if (in->copy_param)
3589 r.offset = out->offset;
3590 else if (out->copy_param)
3591 r.offset = in->offset;
3592 else
3593 r.offset = in->offset + out->offset;
3594 adjustments.quick_push (r);
3595 }
3596
3597 for (i = 0; i < inlen; i++)
3598 {
3599 struct ipa_parm_adjustment *n = &inner[i];
3600
3601 if (n->remove_param)
3602 adjustments.quick_push (*n);
3603 }
3604
3605 tmp.release ();
3606 return adjustments;
3607 }
3608
3609 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
3610 way, assuming they are meant to be applied to FNDECL. */
3611
3612 void
3613 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3614 tree fndecl)
3615 {
3616 int i, len = adjustments.length ();
3617 bool first = true;
3618 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3619
3620 fprintf (file, "IPA param adjustments: ");
3621 for (i = 0; i < len; i++)
3622 {
3623 struct ipa_parm_adjustment *adj;
3624 adj = &adjustments[i];
3625
3626 if (!first)
3627 fprintf (file, " ");
3628 else
3629 first = false;
3630
3631 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3632 print_generic_expr (file, parms[adj->base_index], 0);
3633 if (adj->base)
3634 {
3635 fprintf (file, ", base: ");
3636 print_generic_expr (file, adj->base, 0);
3637 }
3638 if (adj->reduction)
3639 {
3640 fprintf (file, ", reduction: ");
3641 print_generic_expr (file, adj->reduction, 0);
3642 }
3643 if (adj->new_ssa_base)
3644 {
3645 fprintf (file, ", new_ssa_base: ");
3646 print_generic_expr (file, adj->new_ssa_base, 0);
3647 }
3648
3649 if (adj->copy_param)
3650 fprintf (file, ", copy_param");
3651 else if (adj->remove_param)
3652 fprintf (file, ", remove_param");
3653 else
3654 fprintf (file, ", offset %li", (long) adj->offset);
3655 if (adj->by_ref)
3656 fprintf (file, ", by_ref");
3657 print_node_brief (file, ", type: ", adj->type, 0);
3658 fprintf (file, "\n");
3659 }
3660 parms.release ();
3661 }
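
/* A dump produced by the function above might look roughly as follows
   (parameter names and the exact rendering of types depend on the
   function being dumped):

     IPA param adjustments: 0. base_index: 0 - i, copy_param, type: <int>
      1. base_index: 2 - s, offset 32, type: <float>  */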
3662
3663 /* Dump the linked list of aggregate replacement values AV to F. */
3664
3665 void
3666 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3667 {
3668 bool comma = false;
3669 fprintf (f, " Aggregate replacements:");
3670 for (; av; av = av->next)
3671 {
3672 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3673 av->index, av->offset);
3674 print_generic_expr (f, av->value, 0);
3675 comma = true;
3676 }
3677 fprintf (f, "\n");
3678 }
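
/* Example output of the dumper above for two hypothetical replacements of
   parameter 1, at bit offsets 32 and 64:

      Aggregate replacements: 1[32]=4, 1[64]=7  */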
3679
3680 /* Stream out jump function JUMP_FUNC to OB. */
3681
3682 static void
3683 ipa_write_jump_function (struct output_block *ob,
3684 struct ipa_jump_func *jump_func)
3685 {
3686 struct ipa_agg_jf_item *item;
3687 struct bitpack_d bp;
3688 int i, count;
3689
3690 streamer_write_uhwi (ob, jump_func->type);
3691 switch (jump_func->type)
3692 {
3693 case IPA_JF_UNKNOWN:
3694 break;
3695 case IPA_JF_KNOWN_TYPE:
3696 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3697 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3698 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3699 break;
3700 case IPA_JF_CONST:
3701 gcc_assert (EXPR_LOCATION (jump_func->value.constant.value)
3702 == UNKNOWN_LOCATION);
3703 stream_write_tree (ob, jump_func->value.constant.value, true);
3704 break;
3705 case IPA_JF_PASS_THROUGH:
3706 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3707 if (jump_func->value.pass_through.operation == NOP_EXPR)
3708 {
3709 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3710 bp = bitpack_create (ob->main_stream);
3711 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3712 streamer_write_bitpack (&bp);
3713 }
3714 else
3715 {
3716 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3717 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3718 }
3719 break;
3720 case IPA_JF_ANCESTOR:
3721 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3722 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3723 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3724 bp = bitpack_create (ob->main_stream);
3725 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3726 streamer_write_bitpack (&bp);
3727 break;
3728 }
3729
3730 count = vec_safe_length (jump_func->agg.items);
3731 streamer_write_uhwi (ob, count);
3732 if (count)
3733 {
3734 bp = bitpack_create (ob->main_stream);
3735 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3736 streamer_write_bitpack (&bp);
3737 }
3738
3739 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3740 {
3741 streamer_write_uhwi (ob, item->offset);
3742 stream_write_tree (ob, item->value, true);
3743 }
3744 }
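
/* As a concrete illustration of the format written above, a simple
   pass-through jump function with operation NOP_EXPR, formal_id 2,
   agg_preserved set and no aggregate entries is emitted as:

     uhwi IPA_JF_PASS_THROUGH, uhwi NOP_EXPR, uhwi 2,
     bitpack { agg_preserved:1 }, uhwi 0 (the aggregate item count).  */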
3745
3746 /* Read in jump function JUMP_FUNC from IB. */
3747
3748 static void
3749 ipa_read_jump_function (struct lto_input_block *ib,
3750 struct ipa_jump_func *jump_func,
3751 struct cgraph_edge *cs,
3752 struct data_in *data_in)
3753 {
3754 enum jump_func_type jftype;
3755 enum tree_code operation;
3756 int i, count;
3757
3758 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3759 switch (jftype)
3760 {
3761 case IPA_JF_UNKNOWN:
3762 jump_func->type = IPA_JF_UNKNOWN;
3763 break;
3764 case IPA_JF_KNOWN_TYPE:
3765 {
3766 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3767 tree base_type = stream_read_tree (ib, data_in);
3768 tree component_type = stream_read_tree (ib, data_in);
3769
3770 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
3771 break;
3772 }
3773 case IPA_JF_CONST:
3774 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
3775 break;
3776 case IPA_JF_PASS_THROUGH:
3777 operation = (enum tree_code) streamer_read_uhwi (ib);
3778 if (operation == NOP_EXPR)
3779 {
3780 int formal_id = streamer_read_uhwi (ib);
3781 struct bitpack_d bp = streamer_read_bitpack (ib);
3782 bool agg_preserved = bp_unpack_value (&bp, 1);
3783 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
3784 }
3785 else
3786 {
3787 tree operand = stream_read_tree (ib, data_in);
3788 int formal_id = streamer_read_uhwi (ib);
3789 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
3790 operation);
3791 }
3792 break;
3793 case IPA_JF_ANCESTOR:
3794 {
3795 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3796 tree type = stream_read_tree (ib, data_in);
3797 int formal_id = streamer_read_uhwi (ib);
3798 struct bitpack_d bp = streamer_read_bitpack (ib);
3799 bool agg_preserved = bp_unpack_value (&bp, 1);
3800
3801 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved);
3802 break;
3803 }
3804 }
3805
3806 count = streamer_read_uhwi (ib);
3807 vec_alloc (jump_func->agg.items, count);
3808 if (count)
3809 {
3810 struct bitpack_d bp = streamer_read_bitpack (ib);
3811 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3812 }
3813 for (i = 0; i < count; i++)
3814 {
3815 struct ipa_agg_jf_item item;
3816 item.offset = streamer_read_uhwi (ib);
3817 item.value = stream_read_tree (ib, data_in);
3818 jump_func->agg.items->quick_push (item);
3819 }
3820 }
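
/* Note that the reader above has to consume fields in exactly the order
   in which ipa_write_jump_function emits them; any change to one of the
   two functions must be mirrored in the other.  */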
3821
3822 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3823 relevant to indirect inlining to OB. */
3824
3825 static void
3826 ipa_write_indirect_edge_info (struct output_block *ob,
3827 struct cgraph_edge *cs)
3828 {
3829 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3830 struct bitpack_d bp;
3831
3832 streamer_write_hwi (ob, ii->param_index);
3833 streamer_write_hwi (ob, ii->offset);
3834 bp = bitpack_create (ob->main_stream);
3835 bp_pack_value (&bp, ii->polymorphic, 1);
3836 bp_pack_value (&bp, ii->agg_contents, 1);
3837 bp_pack_value (&bp, ii->member_ptr, 1);
3838 bp_pack_value (&bp, ii->by_ref, 1);
3839 streamer_write_bitpack (&bp);
3840
3841 if (ii->polymorphic)
3842 {
3843 streamer_write_hwi (ob, ii->otr_token);
3844 stream_write_tree (ob, ii->otr_type, true);
3845 }
3846 }
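
/* The record produced above is thus: hwi param_index, hwi offset, a
   bitpack of the four flags (polymorphic, agg_contents, member_ptr,
   by_ref) and, for polymorphic call sites only, hwi otr_token followed
   by the otr_type tree.  */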
3847
3848 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3849 relevant to indirect inlining from IB. */
3850
3851 static void
3852 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3853 struct data_in *data_in,
3854 struct cgraph_edge *cs)
3855 {
3856 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3857 struct bitpack_d bp;
3858
3859 ii->param_index = (int) streamer_read_hwi (ib);
3860 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3861 bp = streamer_read_bitpack (ib);
3862 ii->polymorphic = bp_unpack_value (&bp, 1);
3863 ii->agg_contents = bp_unpack_value (&bp, 1);
3864 ii->member_ptr = bp_unpack_value (&bp, 1);
3865 ii->by_ref = bp_unpack_value (&bp, 1);
3866 if (ii->polymorphic)
3867 {
3868 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3869 ii->otr_type = stream_read_tree (ib, data_in);
3870 }
3871 }
3872
3873 /* Stream out NODE info to OB. */
3874
3875 static void
3876 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3877 {
3878 int node_ref;
3879 lto_symtab_encoder_t encoder;
3880 struct ipa_node_params *info = IPA_NODE_REF (node);
3881 int j;
3882 struct cgraph_edge *e;
3883 struct bitpack_d bp;
3884
3885 encoder = ob->decl_state->symtab_node_encoder;
3886 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3887 streamer_write_uhwi (ob, node_ref);
3888
3889 streamer_write_uhwi (ob, ipa_get_param_count (info));
3890 for (j = 0; j < ipa_get_param_count (info); j++)
3891 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
3892 bp = bitpack_create (ob->main_stream);
3893 gcc_assert (info->uses_analysis_done
3894 || ipa_get_param_count (info) == 0);
3895 gcc_assert (!info->node_enqueued);
3896 gcc_assert (!info->ipcp_orig_node);
3897 for (j = 0; j < ipa_get_param_count (info); j++)
3898 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3899 streamer_write_bitpack (&bp);
3900 for (j = 0; j < ipa_get_param_count (info); j++)
3901 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
3902 for (e = node->callees; e; e = e->next_callee)
3903 {
3904 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3905
3906 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3907 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3908 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3909 }
3910 for (e = node->indirect_calls; e; e = e->next_callee)
3911 {
3912 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3913
3914 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3915 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3916 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3917 ipa_write_indirect_edge_info (ob, e);
3918 }
3919 }
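
/* To summarize, the per-node record is: the symtab encoder reference,
   the parameter count, one uhwi move cost per parameter, a bitpack of
   the is-used flags, one hwi of controlled uses per parameter, and then,
   for each direct and indirect call edge in turn, the argument count
   followed by that many jump functions; indirect edges additionally
   carry the info written by ipa_write_indirect_edge_info.  */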
3920
3921 /* Stream in NODE info from IB. */
3922
3923 static void
3924 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3925 struct data_in *data_in)
3926 {
3927 struct ipa_node_params *info = IPA_NODE_REF (node);
3928 int k;
3929 struct cgraph_edge *e;
3930 struct bitpack_d bp;
3931
3932 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
3933
3934 for (k = 0; k < ipa_get_param_count (info); k++)
3935 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
3936
3937 bp = streamer_read_bitpack (ib);
3938 if (ipa_get_param_count (info) != 0)
3939 info->uses_analysis_done = true;
3940 info->node_enqueued = false;
3941 for (k = 0; k < ipa_get_param_count (info); k++)
3942 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3943 for (k = 0; k < ipa_get_param_count (info); k++)
3944 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
3945 for (e = node->callees; e; e = e->next_callee)
3946 {
3947 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3948 int count = streamer_read_uhwi (ib);
3949
3950 if (!count)
3951 continue;
3952 vec_safe_grow_cleared (args->jump_functions, count);
3953
3954 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3955 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
3956 data_in);
3957 }
3958 for (e = node->indirect_calls; e; e = e->next_callee)
3959 {
3960 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3961 int count = streamer_read_uhwi (ib);
3962
3963 if (count)
3964 {
3965 vec_safe_grow_cleared (args->jump_functions, count);
3966 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3967 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
3968 data_in);
3969 }
3970 ipa_read_indirect_edge_info (ib, data_in, e);
3971 }
3972 }
3973
3974 /* Write jump functions for all analyzed functions in the current LTO partition. */
3975
3976 void
3977 ipa_prop_write_jump_functions (void)
3978 {
3979 struct cgraph_node *node;
3980 struct output_block *ob;
3981 unsigned int count = 0;
3982 lto_symtab_encoder_iterator lsei;
3983 lto_symtab_encoder_t encoder;
3984
3986 if (!ipa_node_params_vector.exists ())
3987 return;
3988
3989 ob = create_output_block (LTO_section_jump_functions);
3990 encoder = ob->decl_state->symtab_node_encoder;
3991 ob->cgraph_node = NULL;
3992 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3993 lsei_next_function_in_partition (&lsei))
3994 {
3995 node = lsei_cgraph_node (lsei);
3996 if (cgraph_function_with_gimple_body_p (node)
3997 && IPA_NODE_REF (node) != NULL)
3998 count++;
3999 }
4000
4001 streamer_write_uhwi (ob, count);
4002
4003 /* Process all of the functions. */
4004 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4005 lsei_next_function_in_partition (&lsei))
4006 {
4007 node = lsei_cgraph_node (lsei);
4008 if (cgraph_function_with_gimple_body_p (node)
4009 && IPA_NODE_REF (node) != NULL)
4010 ipa_write_node_info (ob, node);
4011 }
4012 streamer_write_char_stream (ob->main_stream, 0);
4013 produce_asm (ob, NULL);
4014 destroy_output_block (ob);
4015 }
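
/* The resulting LTO_section_jump_functions section therefore consists of
   a uhwi count of analyzed nodes, that many per-node records as laid out
   by ipa_write_node_info, and a terminating zero character.  */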
4016
4017 /* Read jump-function section in file FILE_DATA of length LEN with data DATA. */
4018
4019 static void
4020 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4021 size_t len)
4022 {
4023 const struct lto_function_header *header =
4024 (const struct lto_function_header *) data;
4025 const int cfg_offset = sizeof (struct lto_function_header);
4026 const int main_offset = cfg_offset + header->cfg_size;
4027 const int string_offset = main_offset + header->main_size;
4028 struct data_in *data_in;
4029 struct lto_input_block ib_main;
4030 unsigned int i;
4031 unsigned int count;
4032
4033 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4034 header->main_size);
4035
4036 data_in =
4037 lto_data_in_create (file_data, (const char *) data + string_offset,
4038 header->string_size, vNULL);
4039 count = streamer_read_uhwi (&ib_main);
4040
4041 for (i = 0; i < count; i++)
4042 {
4043 unsigned int index;
4044 struct cgraph_node *node;
4045 lto_symtab_encoder_t encoder;
4046
4047 index = streamer_read_uhwi (&ib_main);
4048 encoder = file_data->symtab_node_encoder;
4049 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4050 gcc_assert (node->symbol.definition);
4051 ipa_read_node_info (&ib_main, node, data_in);
4052 }
4053 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4054 len);
4055 lto_data_in_delete (data_in);
4056 }
4057
4058 /* Read IPA-CP jump functions. */
4059
4060 void
4061 ipa_prop_read_jump_functions (void)
4062 {
4063 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4064 struct lto_file_decl_data *file_data;
4065 unsigned int j = 0;
4066
4067 ipa_check_create_node_params ();
4068 ipa_check_create_edge_args ();
4069 ipa_register_cgraph_hooks ();
4070
4071 while ((file_data = file_data_vec[j++]))
4072 {
4073 size_t len;
4074 const char *data = lto_get_section_data (file_data,
LTO_section_jump_functions,
NULL, &len);
4075
4076 if (data)
4077 ipa_prop_read_section (file_data, data, len);
4078 }
4079 }
4080
4081 /* After merging units, we can get mismatch in argument counts.
4082 Also, declaration merging might have rendered parameter lists obsolete.
4083 Re-create the parameter and edge-argument structures so that they are
consistent again. */
4084
4085 void
4086 ipa_update_after_lto_read (void)
4087 {
4088 ipa_check_create_node_params ();
4089 ipa_check_create_edge_args ();
4090 }
4091
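/* Write the aggregate replacement value chain of NODE to OB.  */
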
4092 void
4093 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4094 {
4095 int node_ref;
4096 unsigned int count = 0;
4097 lto_symtab_encoder_t encoder;
4098 struct ipa_agg_replacement_value *aggvals, *av;
4099
4100 aggvals = ipa_get_agg_replacements_for_node (node);
4101 encoder = ob->decl_state->symtab_node_encoder;
4102 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4103 streamer_write_uhwi (ob, node_ref);
4104
4105 for (av = aggvals; av; av = av->next)
4106 count++;
4107 streamer_write_uhwi (ob, count);
4108
4109 for (av = aggvals; av; av = av->next)
4110 {
4111 struct bitpack_d bp;
4112
4113 streamer_write_uhwi (ob, av->offset);
4114 streamer_write_uhwi (ob, av->index);
4115 stream_write_tree (ob, av->value, true);
4116
4117 bp = bitpack_create (ob->main_stream);
4118 bp_pack_value (&bp, av->by_ref, 1);
4119 streamer_write_bitpack (&bp);
4120 }
4121 }
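
/* Each chain written above is thus: the symtab encoder reference of the
   node, a uhwi count of values, and per value a uhwi offset, a uhwi
   parameter index, the value tree and a one-bit bitpack holding by_ref.  */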
4122
4123 /* Stream in the aggregate value replacement chain for NODE from IB. */
4124
4125 static void
4126 read_agg_replacement_chain (struct lto_input_block *ib,
4127 struct cgraph_node *node,
4128 struct data_in *data_in)
4129 {
4130 struct ipa_agg_replacement_value *aggvals = NULL;
4131 unsigned int count, i;
4132
4133 count = streamer_read_uhwi (ib);
4134 for (i = 0; i < count; i++)
4135 {
4136 struct ipa_agg_replacement_value *av;
4137 struct bitpack_d bp;
4138
4139 av = ggc_alloc_ipa_agg_replacement_value ();
4140 av->offset = streamer_read_uhwi (ib);
4141 av->index = streamer_read_uhwi (ib);
4142 av->value = stream_read_tree (ib, data_in);
4143 bp = streamer_read_bitpack (ib);
4144 av->by_ref = bp_unpack_value (&bp, 1);
4145 av->next = aggvals;
4146 aggvals = av;
4147 }
4148 ipa_set_node_agg_value_chain (node, aggvals);
4149 }
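
/* Because each value read above is pushed onto the front of the chain,
   the in-memory list ends up in the reverse of the order in which
   write_agg_replacement_chain emitted it.  Consumers look values up by
   their index/offset pair, so the order is not significant.  */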
4150
4151 /* Write all aggregate replacements for nodes in the current LTO partition. */
4152
4153 void
4154 ipa_prop_write_all_agg_replacement (void)
4155 {
4156 struct cgraph_node *node;
4157 struct output_block *ob;
4158 unsigned int count = 0;
4159 lto_symtab_encoder_iterator lsei;
4160 lto_symtab_encoder_t encoder;
4161
4162 if (!ipa_node_agg_replacements)
4163 return;
4164
4165 ob = create_output_block (LTO_section_ipcp_transform);
4166 encoder = ob->decl_state->symtab_node_encoder;
4167 ob->cgraph_node = NULL;
4168 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4169 lsei_next_function_in_partition (&lsei))
4170 {
4171 node = lsei_cgraph_node (lsei);
4172 if (cgraph_function_with_gimple_body_p (node)
4173 && ipa_get_agg_replacements_for_node (node) != NULL)
4174 count++;
4175 }
4176
4177 streamer_write_uhwi (ob, count);
4178
4179 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4180 lsei_next_function_in_partition (&lsei))
4181 {
4182 node = lsei_cgraph_node (lsei);
4183 if (cgraph_function_with_gimple_body_p (node)
4184 && ipa_get_agg_replacements_for_node (node) != NULL)
4185 write_agg_replacement_chain (ob, node);
4186 }
4187 streamer_write_char_stream (ob->main_stream, 0);
4188 produce_asm (ob, NULL);
4189 destroy_output_block (ob);
4190 }
4191
4192 /* Read replacements section in file FILE_DATA of length LEN with data
4193 DATA. */
4194
4195 static void
4196 read_replacements_section (struct lto_file_decl_data *file_data,
4197 const char *data,
4198 size_t len)
4199 {
4200 const struct lto_function_header *header =
4201 (const struct lto_function_header *) data;
4202 const int cfg_offset = sizeof (struct lto_function_header);
4203 const int main_offset = cfg_offset + header->cfg_size;
4204 const int string_offset = main_offset + header->main_size;
4205 struct data_in *data_in;
4206 struct lto_input_block ib_main;
4207 unsigned int i;
4208 unsigned int count;
4209
4210 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4211 header->main_size);
4212
4213 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4214 header->string_size, vNULL);
4215 count = streamer_read_uhwi (&ib_main);
4216
4217 for (i = 0; i < count; i++)
4218 {
4219 unsigned int index;
4220 struct cgraph_node *node;
4221 lto_symtab_encoder_t encoder;
4222
4223 index = streamer_read_uhwi (&ib_main);
4224 encoder = file_data->symtab_node_encoder;
4225 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4226 gcc_assert (node->symbol.definition);
4227 read_agg_replacement_chain (&ib_main, node, data_in);
4228 }
4229 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4230 len);
4231 lto_data_in_delete (data_in);
4232 }
4233
4234 /* Read IPA-CP aggregate replacements. */
4235
4236 void
4237 ipa_prop_read_all_agg_replacement (void)
4238 {
4239 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4240 struct lto_file_decl_data *file_data;
4241 unsigned int j = 0;
4242
4243 while ((file_data = file_data_vec[j++]))
4244 {
4245 size_t len;
4246 const char *data = lto_get_section_data (file_data,
4247 LTO_section_ipcp_transform,
4248 NULL, &len);
4249 if (data)
4250 read_replacements_section (file_data, data, len);
4251 }
4252 }
4253
4254 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4255 NODE. */
4256
4257 static void
4258 adjust_agg_replacement_values (struct cgraph_node *node,
4259 struct ipa_agg_replacement_value *aggval)
4260 {
4261 struct ipa_agg_replacement_value *v;
4262 int i, c = 0, d = 0, *adj;
4263
4264 if (!node->clone.combined_args_to_skip)
4265 return;
4266
4267 for (v = aggval; v; v = v->next)
4268 {
4269 gcc_assert (v->index >= 0);
4270 if (c < v->index)
4271 c = v->index;
4272 }
4273 c++;
4274
4275 adj = XALLOCAVEC (int, c);
4276 for (i = 0; i < c; i++)
4277 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4278 {
4279 adj[i] = -1;
4280 d++;
4281 }
4282 else
4283 adj[i] = i - d;
4284
4285 for (v = aggval; v; v = v->next)
4286 v->index = adj[v->index];
4287 }
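
/* For example, with combined_args_to_skip = {1} and replacement values
   for indices 0, 2 and 3, the remapping array becomes {0, -1, 1, 2}, so
   the surviving values are renumbered to 0, 1 and 2 to match the clone's
   reduced parameter list.  */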
4288
4289
4290 /* Function body transformation phase: replace loads from aggregates passed
in parameters with the constants recorded in the node's aggregate
replacement chain. For instance, tmp = parm->fld can become tmp = 4 when
IPA-CP has determined that the field is 4 in every context in which this
clone is called. */
4291
4292 unsigned int
4293 ipcp_transform_function (struct cgraph_node *node)
4294 {
4295 vec<ipa_param_descriptor_t> descriptors = vNULL;
4296 struct param_analysis_info *parms_ainfo;
4297 struct ipa_agg_replacement_value *aggval;
4298 gimple_stmt_iterator gsi;
4299 basic_block bb;
4300 int param_count;
4301 bool cfg_changed = false, something_changed = false;
4302
4303 gcc_checking_assert (cfun);
4304 gcc_checking_assert (current_function_decl);
4305
4306 if (dump_file)
4307 fprintf (dump_file, "Modification phase of node %s/%i\n",
4308 cgraph_node_name (node), node->symbol.order);
4309
4310 aggval = ipa_get_agg_replacements_for_node (node);
4311 if (!aggval)
4312 return 0;
4313 param_count = count_formal_params (node->symbol.decl);
4314 if (param_count == 0)
4315 return 0;
4316 adjust_agg_replacement_values (node, aggval);
4317 if (dump_file)
4318 ipa_dump_agg_replacement_values (dump_file, aggval);
4319 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4320 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4321 descriptors.safe_grow_cleared (param_count);
4322 ipa_populate_param_decls (node, descriptors);
4323
4324 FOR_EACH_BB (bb)
4325 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4326 {
4327 struct ipa_agg_replacement_value *v;
4328 gimple stmt = gsi_stmt (gsi);
4329 tree rhs, val, t;
4330 HOST_WIDE_INT offset;
4331 int index;
4332 bool by_ref, vce;
4333
4334 if (!gimple_assign_load_p (stmt))
4335 continue;
4336 rhs = gimple_assign_rhs1 (stmt);
4337 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4338 continue;
4339
4340 vce = false;
4341 t = rhs;
4342 while (handled_component_p (t))
4343 {
4344 /* V_C_E can do things like convert an array of integers to one
4345 bigger integer and similar things we do not handle below. Check T
rather than RHS so that a V_C_E anywhere in the reference chain is
caught, not only at its outermost position. */
4346 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4347 {
4348 vce = true;
4349 break;
4350 }
4351 t = TREE_OPERAND (t, 0);
4352 }
4353 if (vce)
4354 continue;
4355
4356 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4357 rhs, &index, &offset, &by_ref))
4358 continue;
4359 for (v = aggval; v; v = v->next)
4360 if (v->index == index
4361 && v->offset == offset)
4362 break;
4363 if (!v || v->by_ref != by_ref)
4364 continue;
4365
4366 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4367 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4368 {
4369 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4370 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4371 else if (TYPE_SIZE (TREE_TYPE (rhs))
4372 == TYPE_SIZE (TREE_TYPE (v->value)))
4373 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4374 else
4375 {
4376 if (dump_file)
4377 {
4378 fprintf (dump_file, " const ");
4379 print_generic_expr (dump_file, v->value, 0);
4380 fprintf (dump_file, " can't be converted to type of ");
4381 print_generic_expr (dump_file, rhs, 0);
4382 fprintf (dump_file, "\n");
4383 }
4384 continue;
4385 }
4386 }
4387 else
4388 val = v->value;
4389
4390 if (dump_file && (dump_flags & TDF_DETAILS))
4391 {
4392 fprintf (dump_file, "Modifying stmt:\n ");
4393 print_gimple_stmt (dump_file, stmt, 0, 0);
4394 }
4395 gimple_assign_set_rhs_from_tree (&gsi, val);
4396 update_stmt (stmt);
4397
4398 if (dump_file && (dump_flags & TDF_DETAILS))
4399 {
4400 fprintf (dump_file, "into:\n ");
4401 print_gimple_stmt (dump_file, stmt, 0, 0);
4402 fprintf (dump_file, "\n");
4403 }
4404
4405 something_changed = true;
4406 if (maybe_clean_eh_stmt (stmt)
4407 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4408 cfg_changed = true;
4409 }
4410
4411 (*ipa_node_agg_replacements)[node->uid] = NULL;
4412 free_parms_ainfo (parms_ainfo, param_count);
4413 descriptors.release ();
4414
4415 if (!something_changed)
4416 return 0;
4417 else if (cfg_changed)
4418 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4419 else
4420 return TODO_update_ssa_only_virtuals;
4421 }