gcc/ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  /* Set once the corresponding kind of modification (of the parameter
     itself, of a reference through it, or of the pointed-to data) has been
     detected, so that the negative result can be cached.  */
  bool parm_modified, ref_modified, pt_modified;
  /* Statements already visited by the respective walks over virtual
     definitions, so that no statement needs to be processed twice.  */
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      int param_count;

      param_count = count_formal_params (node->symbol.decl);
      if (param_count)
        {
          info->descriptors.safe_grow_cleared (param_count);
          ipa_populate_param_decls (node, info->descriptors);
        }
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   tree_code_name[(int)
                                  jump_func->value.pass_through.operation]);
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_low_cst (TYPE_SIZE (item->value), 1));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
           node->symbol.order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (cgraph_node_name (node)), node->symbol.order,
               xstrdup (cgraph_node_name (cs->callee)),
               cs->callee->symbol.order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect aggregate callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i",
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
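
/* Illustration (an annotation added to this listing, not part of the original
   sources): for calls such as

     bar (7);
     baz (&some_fn);

   the first argument becomes a CONST jump function holding 7 with no
   reference description, while the address of some_fn also gets an
   ipa_cst_ref_desc with refcount 1, so that the reference to some_fn can be
   dropped later if all uses of the jump function turn into direct calls.  */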

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
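
/* Illustrative mapping of the setters above to gimple seen at a call site
   (an annotation, not part of the original sources), for a parameter "a":

     bar (a_1);                    simple pass-through (NOP_EXPR)
     b_2 = a_1 + 4;  bar (b_2);    arithmetic pass-through (PLUS_EXPR, 4)
     bar (&this_1(D)->D.1748);     ancestor (offset of the sub-object)  */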

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
   */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
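
/* For example (an annotation, not part of the original sources), a
   constructor of a class A typically contains a statement along the lines of

     this_1(D)->_vptr.A = &_ZTV1A[2];

   i.e. an assignment to a COMPONENT_REF whose field has DECL_VIRTUAL_P set,
   which the function above must conservatively report as a potential vtable
   pointer store.  */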

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}


/* Like detect_type_change but with extra argument COMP_TYPE which will become
   the component type part of the new JFUNC if a dynamic type change is
   detected and the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
                      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
   looking for assignments to its virtual table pointer.  If it has, return
   true and fill in the jump function JFUNC with relevant type information or
   set it to unknown.  ARG is the object itself (not a pointer to it, unless
   dereferenced).  BASE is the base of the memory access as returned by
   get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  comp_type = TREE_TYPE (TREE_TYPE (arg));
  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration
   which is known not to have been modified, return the index of the parameter
   in ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}
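
/* For instance (an annotation, not part of the original sources), in

     foo (int a)
     {
       int a.0;
       a.0_2 = a;
       bar (a.0_2);

   the statement a.0_2 = a is recognized by load_from_unmodified_param and
   yields the index of "a", provided no aliased store to "a" happens before
   the load.  */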

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      return true;
    }
  return false;
}

/* Just like the previous function, but without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters, as used in
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt)
               && !detect_type_change_ssa (tc_ssa, call, jfunc))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm));
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}
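
/* As an illustration (an annotation, not part of the original sources):
   when a local, automatically allocated object is passed by address,

     struct A a;
     bar (&a);

   a KNOWN_TYPE jump function with base type A and offset 0 can be built,
   provided detect_type_change does not find a later assignment to the
   virtual table pointer of "a".  */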

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
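
/* Under the Itanium C++ ABI a pointer to member function is effectively

     struct
     {
       void (*__pfn) ();   pointer to the method or a vtable offset
       ptrdiff_t __delta;  adjustment to apply to the this pointer
     };

   which is the shape the predicate above tests for (this note is an
   annotation, not part of the original sources).  */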

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before the
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
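
/* Example (an annotation, not part of the original sources): for

     s.f1 = 1;
     s.f2 = 2;
     foo (&s);

   the walk in determine_known_aggregate_parts below builds a list with two
   entries, (offset of f1, constant 1) and (offset of f2, constant 2), which
   are then turned into ipa_agg_jf_item entries of the jump function.  */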

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_low_cst (type_size, 1);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0
                  && !detect_type_change_ssa (arg, call, jfunc))
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || (POINTER_TYPE_P (TREE_TYPE (arg)))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->symbol.definition && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  return cs;
}
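
/* For example (an annotation, not part of the original sources), in

     foo (void (*f) (void))
     {
       f ();
     }

   the call via "f" results in ipa_note_param_call with param_index 0, which
   makes the callsite visible to interprocedural analyses such as constant
   propagation.  */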
1613
1614 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1615 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1616 intermediate information about each formal parameter. Currently it checks
1617 whether the call calls a pointer that is a formal parameter and if so, the
1618 parameter is marked with the called flag and an indirect call graph edge
1619 describing the call is created. This is very simple for ordinary pointers
1620 represented in SSA but not-so-nice when it comes to member pointers. The
1621 ugly part of this function does nothing more than trying to match the
1622 pattern of such a call. An example of such a pattern is the gimple dump
1623 below, the call is on the last line:
1624
1625 <bb 2>:
1626 f$__delta_5 = f.__delta;
1627 f$__pfn_24 = f.__pfn;
1628
1629 or
1630 <bb 2>:
1631 f$__delta_5 = MEM[(struct *)&f];
1632 f$__pfn_24 = MEM[(struct *)&f + 4B];
1633
1634 and a few lines below:
1635
1636 <bb 5>
1637 D.2496_3 = (int) f$__pfn_24;
1638 D.2497_4 = D.2496_3 & 1;
1639 if (D.2497_4 != 0)
1640 goto <bb 3>;
1641 else
1642 goto <bb 4>;
1643
1644 <bb 6>:
1645 D.2500_7 = (unsigned int) f$__delta_5;
1646 D.2501_8 = &S + D.2500_7;
1647 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1648 D.2503_10 = *D.2502_9;
1649 D.2504_12 = f$__pfn_24 + -1;
1650 D.2505_13 = (unsigned int) D.2504_12;
1651 D.2506_14 = D.2503_10 + D.2505_13;
1652 D.2507_15 = *D.2506_14;
1653 iftmp.11_16 = (String:: *) D.2507_15;
1654
1655 <bb 7>:
1656 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1657 D.2500_19 = (unsigned int) f$__delta_5;
1658 D.2508_20 = &S + D.2500_19;
1659 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1660
1661 Such patterns are results of simple calls to a member pointer:
1662
1663 int doprinting (int (MyString::* f)(int) const)
1664 {
1665 MyString S ("somestring");
1666
1667 return (S.*f)(4);
1668 }
1669
1670 Moreover, the function also looks for called pointers loaded from aggregates
1671 passed by value or reference. */
1672
1673 static void
1674 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1675 struct ipa_node_params *info,
1676 struct param_analysis_info *parms_ainfo,
1677 gimple call, tree target)
1678 {
1679 gimple def;
1680 tree n1, n2;
1681 gimple d1, d2;
1682 tree rec, rec2, cond;
1683 gimple branch;
1684 int index;
1685 basic_block bb, virt_bb, join;
1686 HOST_WIDE_INT offset;
1687 bool by_ref;
1688
1689 if (SSA_NAME_IS_DEFAULT_DEF (target))
1690 {
1691 tree var = SSA_NAME_VAR (target);
1692 index = ipa_get_param_decl_index (info, var);
1693 if (index >= 0)
1694 ipa_note_param_call (node, index, call);
1695 return;
1696 }
1697
1698 def = SSA_NAME_DEF_STMT (target);
1699 if (gimple_assign_single_p (def)
1700 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1701 gimple_assign_rhs1 (def), &index, &offset,
1702 &by_ref))
1703 {
1704 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1705 cs->indirect_info->offset = offset;
1706 cs->indirect_info->agg_contents = 1;
1707 cs->indirect_info->by_ref = by_ref;
1708 return;
1709 }
1710
1711 /* Now we need to try to match the complex pattern of calling a member
1712 pointer. */
1713 if (gimple_code (def) != GIMPLE_PHI
1714 || gimple_phi_num_args (def) != 2
1715 || !POINTER_TYPE_P (TREE_TYPE (target))
1716 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1717 return;
1718
1719 /* First, we need to check whether one of these is a load from a member
1720 pointer that is a parameter to this function. */
1721 n1 = PHI_ARG_DEF (def, 0);
1722 n2 = PHI_ARG_DEF (def, 1);
1723 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1724 return;
1725 d1 = SSA_NAME_DEF_STMT (n1);
1726 d2 = SSA_NAME_DEF_STMT (n2);
1727
1728 join = gimple_bb (def);
1729 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1730 {
1731 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1732 return;
1733
1734 bb = EDGE_PRED (join, 0)->src;
1735 virt_bb = gimple_bb (d2);
1736 }
1737 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1738 {
1739 bb = EDGE_PRED (join, 1)->src;
1740 virt_bb = gimple_bb (d1);
1741 }
1742 else
1743 return;
1744
1745 /* Second, we need to check that the basic blocks are laid out in the way
1746 corresponding to the pattern. */
1747
1748 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1749 || single_pred (virt_bb) != bb
1750 || single_succ (virt_bb) != join)
1751 return;
1752
1753 /* Third, let's see that the branching is done depending on the least
1754 significant bit of the pfn. */
1755
1756 branch = last_stmt (bb);
1757 if (!branch || gimple_code (branch) != GIMPLE_COND)
1758 return;
1759
1760 if ((gimple_cond_code (branch) != NE_EXPR
1761 && gimple_cond_code (branch) != EQ_EXPR)
1762 || !integer_zerop (gimple_cond_rhs (branch)))
1763 return;
1764
1765 cond = gimple_cond_lhs (branch);
1766 if (!ipa_is_ssa_with_stmt_def (cond))
1767 return;
1768
1769 def = SSA_NAME_DEF_STMT (cond);
1770 if (!is_gimple_assign (def)
1771 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1772 || !integer_onep (gimple_assign_rhs2 (def)))
1773 return;
1774
1775 cond = gimple_assign_rhs1 (def);
1776 if (!ipa_is_ssa_with_stmt_def (cond))
1777 return;
1778
1779 def = SSA_NAME_DEF_STMT (cond);
1780
1781 if (is_gimple_assign (def)
1782 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1783 {
1784 cond = gimple_assign_rhs1 (def);
1785 if (!ipa_is_ssa_with_stmt_def (cond))
1786 return;
1787 def = SSA_NAME_DEF_STMT (cond);
1788 }
1789
1790 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1791 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1792 == ptrmemfunc_vbit_in_delta),
1793 NULL);
1794 if (rec != rec2)
1795 return;
1796
1797 index = ipa_get_param_decl_index (info, rec);
1798 if (index >= 0
1799 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1800 {
1801 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1802 cs->indirect_info->offset = offset;
1803 cs->indirect_info->agg_contents = 1;
1804 }
1805
1806 return;
1807 }
1808
1809 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET, and if the
1810 object referenced in the expression is a formal parameter of the caller
1811 (described by INFO), create a call note for the statement. */
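/* For instance (a hypothetical example), in

     void f (A *a) { a->foo (); }

   the callee is an OBJ_TYPE_REF whose object is the default definition of
   the parameter a, so an indirect edge with a zero ancestor offset and the
   OTR token of foo is recorded below.  */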
1812
1813 static void
1814 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1815 struct ipa_node_params *info, gimple call,
1816 tree target)
1817 {
1818 struct cgraph_edge *cs;
1819 struct cgraph_indirect_call_info *ii;
1820 struct ipa_jump_func jfunc;
1821 tree obj = OBJ_TYPE_REF_OBJECT (target);
1822 int index;
1823 HOST_WIDE_INT anc_offset;
1824
1825 if (!flag_devirtualize)
1826 return;
1827
1828 if (TREE_CODE (obj) != SSA_NAME)
1829 return;
1830
1831 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1832 {
1833 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1834 return;
1835
1836 anc_offset = 0;
1837 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1838 gcc_assert (index >= 0);
1839 if (detect_type_change_ssa (obj, call, &jfunc))
1840 return;
1841 }
1842 else
1843 {
1844 gimple stmt = SSA_NAME_DEF_STMT (obj);
1845 tree expr;
1846
1847 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1848 if (!expr)
1849 return;
1850 index = ipa_get_param_decl_index (info,
1851 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1852 gcc_assert (index >= 0);
1853 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1854 return;
1855 }
1856
1857 cs = ipa_note_param_call (node, index, call);
1858 ii = cs->indirect_info;
1859 ii->offset = anc_offset;
1860 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1861 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1862 ii->polymorphic = 1;
1863 }
1864
1865 /* Analyze the call statement CALL to determine whether and how it utilizes
1866 formal parameters of the caller (described by INFO). PARMS_AINFO is a pointer
1867 to a vector containing intermediate information about each formal parameter. */
1868
1869 static void
1870 ipa_analyze_call_uses (struct cgraph_node *node,
1871 struct ipa_node_params *info,
1872 struct param_analysis_info *parms_ainfo, gimple call)
1873 {
1874 tree target = gimple_call_fn (call);
1875
1876 if (!target)
1877 return;
1878 if (TREE_CODE (target) == SSA_NAME)
1879 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1880 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1881 ipa_analyze_virtual_call_uses (node, info, call, target);
1882 }
1883
1884
1885 /* Analyze the statement STMT with respect to formal parameters (described
1886 in INFO) of the caller given by NODE. Currently it only checks whether
1887 formal parameters are called. PARMS_AINFO is a pointer to a vector
1888 containing intermediate information about each formal parameter. */
1889
1890 static void
1891 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1892 struct param_analysis_info *parms_ainfo, gimple stmt)
1893 {
1894 if (is_gimple_call (stmt))
1895 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1896 }
1897
1898 /* Callback of walk_stmt_load_store_addr_ops, used as the visit_load,
1899 visit_store and visit_addr callback. If OP is a parameter declaration,
1900 mark it as used in the info structure passed in DATA. */
1901
1902 static bool
1903 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1904 tree op, void *data)
1905 {
1906 struct ipa_node_params *info = (struct ipa_node_params *) data;
1907
1908 op = get_base_address (op);
1909 if (op
1910 && TREE_CODE (op) == PARM_DECL)
1911 {
1912 int index = ipa_get_param_decl_index (info, op);
1913 gcc_assert (index >= 0);
1914 ipa_set_param_used (info, index, true);
1915 }
1916
1917 return false;
1918 }
1919
1920 /* Scan the function body of NODE and inspect the uses of formal parameters.
1921 Store the findings in various fields of the associated ipa_node_params
1922 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1923 vector containing intermediate information about each formal parameter. */
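/* For example (hypothetically), a pointer parameter that is only ever
   passed on as an argument to other calls gets a controlled uses count
   equal to the number of such calls; any other kind of use, or a parameter
   that is not a gimple register, yields IPA_UNDESCRIBED_USE instead.  */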
1924
1925 static void
1926 ipa_analyze_params_uses (struct cgraph_node *node,
1927 struct param_analysis_info *parms_ainfo)
1928 {
1929 tree decl = node->symbol.decl;
1930 basic_block bb;
1931 struct function *func;
1932 gimple_stmt_iterator gsi;
1933 struct ipa_node_params *info = IPA_NODE_REF (node);
1934 int i;
1935
1936 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1937 return;
1938
1939 for (i = 0; i < ipa_get_param_count (info); i++)
1940 {
1941 tree parm = ipa_get_param (info, i);
1942 int controlled_uses = 0;
1943
1944 /* For SSA regs, see if the parameter is used. For non-SSA parameters
1945 we compute the flag during modification analysis. */
1946 if (is_gimple_reg (parm))
1947 {
1948 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1949 parm);
1950 if (ddef && !has_zero_uses (ddef))
1951 {
1952 imm_use_iterator imm_iter;
1953 use_operand_p use_p;
1954
1955 ipa_set_param_used (info, i, true);
1956 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
1957 if (!is_gimple_call (USE_STMT (use_p)))
1958 {
1959 controlled_uses = IPA_UNDESCRIBED_USE;
1960 break;
1961 }
1962 else
1963 controlled_uses++;
1964 }
1965 else
1966 controlled_uses = 0;
1967 }
1968 else
1969 controlled_uses = IPA_UNDESCRIBED_USE;
1970 ipa_set_controlled_uses (info, i, controlled_uses);
1971 }
1972
1973 func = DECL_STRUCT_FUNCTION (decl);
1974 FOR_EACH_BB_FN (bb, func)
1975 {
1976 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1977 {
1978 gimple stmt = gsi_stmt (gsi);
1979
1980 if (is_gimple_debug (stmt))
1981 continue;
1982
1983 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1984 walk_stmt_load_store_addr_ops (stmt, info,
1985 visit_ref_for_mod_analysis,
1986 visit_ref_for_mod_analysis,
1987 visit_ref_for_mod_analysis);
1988 }
1989 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1990 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1991 visit_ref_for_mod_analysis,
1992 visit_ref_for_mod_analysis,
1993 visit_ref_for_mod_analysis);
1994 }
1995
1996 info->uses_analysis_done = 1;
1997 }
1998
1999 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2000
2001 static void
2002 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2003 {
2004 int i;
2005
2006 for (i = 0; i < param_count; i++)
2007 {
2008 if (parms_ainfo[i].parm_visited_statements)
2009 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2010 if (parms_ainfo[i].pt_visited_statements)
2011 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2012 }
2013 }
2014
2015 /* Initialize the array describing properties of formal parameters
2016 of NODE, analyze their uses and compute jump functions associated
2017 with actual arguments of calls from within NODE. */
2018
2019 void
2020 ipa_analyze_node (struct cgraph_node *node)
2021 {
2022 struct ipa_node_params *info;
2023 struct param_analysis_info *parms_ainfo;
2024 int param_count;
2025
2026 ipa_check_create_node_params ();
2027 ipa_check_create_edge_args ();
2028 info = IPA_NODE_REF (node);
2029 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
2030 ipa_initialize_node_params (node);
2031
2032 param_count = ipa_get_param_count (info);
2033 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2034 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2035
2036 ipa_analyze_params_uses (node, parms_ainfo);
2037 ipa_compute_jump_functions (node, parms_ainfo);
2038
2039 free_parms_ainfo (parms_ainfo, param_count);
2040 pop_cfun ();
2041 }
2042
2043 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2044 attempt a type-based devirtualization. If successful, return the
2045 target function declaration, otherwise return NULL. */
2046
2047 tree
2048 ipa_intraprocedural_devirtualization (gimple call)
2049 {
2050 tree binfo, token, fndecl;
2051 struct ipa_jump_func jfunc;
2052 tree otr = gimple_call_fn (call);
2053
2054 jfunc.type = IPA_JF_UNKNOWN;
2055 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2056 call);
2057 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2058 return NULL_TREE;
2059 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2060 if (!binfo)
2061 return NULL_TREE;
2062 token = OBJ_TYPE_REF_TOKEN (otr);
2063 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2064 binfo);
2065 return fndecl;
2066 }
2067
2068 /* Update the jump function DST when the call graph edge corresponding to SRC
2069 is being inlined, knowing that DST is of type ancestor and SRC of known
2070 type. */
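/* For instance (a made-up example), if SRC describes a known type with
   base type B at offset 32 and DST is an ancestor jump function with
   offset 64 and type A, the result is a known-type jump function with
   base type B, offset 96 and component type A.  */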
2071
2072 static void
2073 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2074 struct ipa_jump_func *dst)
2075 {
2076 HOST_WIDE_INT combined_offset;
2077 tree combined_type;
2078
2079 combined_offset = ipa_get_jf_known_type_offset (src)
2080 + ipa_get_jf_ancestor_offset (dst);
2081 combined_type = ipa_get_jf_ancestor_type (dst);
2082
2083 ipa_set_jf_known_type (dst, combined_offset,
2084 ipa_get_jf_known_type_base_type (src),
2085 combined_type);
2086 }
2087
2088 /* Update the jump functions associated with call graph edge E when the call
2089 graph edge CS is being inlined, assuming that E->caller is already (possibly
2090 indirectly) inlined into CS->callee and that E has not been inlined. */
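/* Schematically (a hypothetical example, not from the original sources),
   if E passes its caller's parameter 2 through unchanged and the inlined
   edge CS passed a constant as argument 2, then after inlining E
   effectively passes that constant, so the pass-through jump function on E
   is replaced below by the constant jump function taken from TOP.  */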
2091
2092 static void
2093 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2094 struct cgraph_edge *e)
2095 {
2096 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2097 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2098 int count = ipa_get_cs_argument_count (args);
2099 int i;
2100
2101 for (i = 0; i < count; i++)
2102 {
2103 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2104
2105 if (dst->type == IPA_JF_ANCESTOR)
2106 {
2107 struct ipa_jump_func *src;
2108 int dst_fid = dst->value.ancestor.formal_id;
2109
2110 /* A variable number of arguments can cause havoc if we try to access
2111 an argument that does not exist in the inlined edge. So make sure
2112 we don't. */
2113 if (dst_fid >= ipa_get_cs_argument_count (top))
2114 {
2115 dst->type = IPA_JF_UNKNOWN;
2116 continue;
2117 }
2118
2119 src = ipa_get_ith_jump_func (top, dst_fid);
2120
2121 if (src->agg.items
2122 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2123 {
2124 struct ipa_agg_jf_item *item;
2125 int j;
2126
2127 /* Currently we do not produce clobber aggregate jump functions;
2128 replace this with merging when we do. */
2129 gcc_assert (!dst->agg.items);
2130
2131 dst->agg.items = vec_safe_copy (src->agg.items);
2132 dst->agg.by_ref = src->agg.by_ref;
2133 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2134 item->offset -= dst->value.ancestor.offset;
2135 }
2136
2137 if (src->type == IPA_JF_KNOWN_TYPE)
2138 combine_known_type_and_ancestor_jfs (src, dst);
2139 else if (src->type == IPA_JF_PASS_THROUGH
2140 && src->value.pass_through.operation == NOP_EXPR)
2141 {
2142 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2143 dst->value.ancestor.agg_preserved &=
2144 src->value.pass_through.agg_preserved;
2145 }
2146 else if (src->type == IPA_JF_ANCESTOR)
2147 {
2148 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2149 dst->value.ancestor.offset += src->value.ancestor.offset;
2150 dst->value.ancestor.agg_preserved &=
2151 src->value.ancestor.agg_preserved;
2152 }
2153 else
2154 dst->type = IPA_JF_UNKNOWN;
2155 }
2156 else if (dst->type == IPA_JF_PASS_THROUGH)
2157 {
2158 struct ipa_jump_func *src;
2159 /* We must check the range because of calls with a variable number of
2160 arguments, and we cannot combine jump functions with operations. */
2161 if (dst->value.pass_through.operation == NOP_EXPR
2162 && (dst->value.pass_through.formal_id
2163 < ipa_get_cs_argument_count (top)))
2164 {
2165 bool agg_p;
2166 int dst_fid = dst->value.pass_through.formal_id;
2167 src = ipa_get_ith_jump_func (top, dst_fid);
2168 agg_p = dst->value.pass_through.agg_preserved;
2169
2170 dst->type = src->type;
2171 dst->value = src->value;
2172
2173 if (src->agg.items
2174 && (agg_p || !src->agg.by_ref))
2175 {
2176 /* Currently we do not produce clobber aggregate jump
2177 functions; replace this with merging when we do. */
2178 gcc_assert (!dst->agg.items);
2179
2180 dst->agg.by_ref = src->agg.by_ref;
2181 dst->agg.items = vec_safe_copy (src->agg.items);
2182 }
2183
2184 if (!agg_p)
2185 {
2186 if (dst->type == IPA_JF_PASS_THROUGH)
2187 dst->value.pass_through.agg_preserved = false;
2188 else if (dst->type == IPA_JF_ANCESTOR)
2189 dst->value.ancestor.agg_preserved = false;
2190 }
2191 }
2192 else
2193 dst->type = IPA_JF_UNKNOWN;
2194 }
2195 }
2196 }
2197
2198 /* If TARGET is an addr_expr of a function declaration, make it the destination
2199 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2200
2201 struct cgraph_edge *
2202 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2203 {
2204 struct cgraph_node *callee;
2205 struct inline_edge_summary *es = inline_edge_summary (ie);
2206 bool unreachable = false;
2207
2208 if (TREE_CODE (target) == ADDR_EXPR)
2209 target = TREE_OPERAND (target, 0);
2210 if (TREE_CODE (target) != FUNCTION_DECL)
2211 {
2212 target = canonicalize_constructor_val (target, NULL);
2213 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2214 {
2215 if (dump_file)
2216 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2217 " in %s/%i, making it unreachable.\n",
2218 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2219 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2220 callee = cgraph_get_create_node (target);
2221 unreachable = true;
2222 }
2223 else
2224 callee = cgraph_get_node (target);
2225 }
2226 else
2227 callee = cgraph_get_node (target);
2228
2229 /* Because may-edges are not explicitly represented and the vtable may be
2230 external, we may create the first reference to the object in the unit. */
2231 if (!callee || callee->global.inlined_to)
2232 {
2233
2234 /* We had better ensure we can refer to it.
2235 In the case of static functions we are out of luck, since we have already
2236 removed their bodies. In the case of public functions we may or may
2237 not introduce the reference. */
2238 if (!canonicalize_constructor_val (target, NULL)
2239 || !TREE_PUBLIC (target))
2240 {
2241 if (dump_file)
2242 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2243 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2244 xstrdup (cgraph_node_name (ie->caller)),
2245 ie->caller->symbol.order,
2246 xstrdup (cgraph_node_name (ie->callee)),
2247 ie->callee->symbol.order);
2248 return NULL;
2249 }
2250 callee = cgraph_get_create_real_symbol_node (target);
2251 }
2252 ipa_check_create_node_params ();
2253
2254 /* We cannot make edges to inline clones. It is a bug if someone removed
2255 the cgraph node too early. */
2256 gcc_assert (!callee->global.inlined_to);
2257
2258 cgraph_make_edge_direct (ie, callee);
2259 es = inline_edge_summary (ie);
2260 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2261 - eni_size_weights.call_cost);
2262 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2263 - eni_time_weights.call_cost);
2264 if (dump_file && !unreachable)
2265 {
2266 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2267 "(%s/%i -> %s/%i), for stmt ",
2268 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2269 xstrdup (cgraph_node_name (ie->caller)),
2270 ie->caller->symbol.order,
2271 xstrdup (cgraph_node_name (ie->callee)),
2272 ie->callee->symbol.order);
2273 if (ie->call_stmt)
2274 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2275 else
2276 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2277 }
2278 callee = cgraph_function_or_thunk_node (callee, NULL);
2279
2280 return ie;
2281 }
2282
2283 /* Retrieve a value from the aggregate jump function AGG for the given OFFSET,
2284 or return NULL if there is none. BY_REF specifies whether the value has to
2285 be passed by reference or by value. */
2286
2287 tree
2288 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2289 HOST_WIDE_INT offset, bool by_ref)
2290 {
2291 struct ipa_agg_jf_item *item;
2292 int i;
2293
2294 if (by_ref != agg->by_ref)
2295 return NULL;
2296
2297 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2298 if (item->offset == offset)
2299 {
2300 /* Currently we do not have clobber values; return NULL for them once
2301 we do. */
2302 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2303 return item->value;
2304 }
2305 return NULL;
2306 }
2307
2308 /* Remove a reference to SYMBOL from the list of references of a node given by
2309 reference description RDESC. */
2310
2311 static void
2312 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2313 {
2314 struct ipa_ref *to_del;
2315 struct cgraph_edge *origin;
2316
2317 origin = rdesc->cs;
2318 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2319 origin->call_stmt);
2320 gcc_assert (to_del);
2321 ipa_remove_reference (to_del);
2322 if (dump_file)
2323 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2324 xstrdup (cgraph_node_name (origin->caller)),
2325 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2326 }
2327
2328 /* If JFUNC has a reference description with refcount different from
2329 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2330 NULL. JFUNC must be a constant jump function. */
2331
2332 static struct ipa_cst_ref_desc *
2333 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2334 {
2335 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2336 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2337 return rdesc;
2338 else
2339 return NULL;
2340 }
2341
2342 /* Try to find a destination for indirect edge IE that corresponds to a simple
2343 call or a call through a member function pointer, where the destination is a
2344 pointer formal parameter described by jump function JFUNC. If it can be
2345 determined, return the newly direct edge, otherwise return NULL.
2346 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2347
2348 static struct cgraph_edge *
2349 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2350 struct ipa_jump_func *jfunc,
2351 struct ipa_node_params *new_root_info)
2352 {
2353 struct ipa_cst_ref_desc *rdesc;
2354 struct cgraph_edge *cs;
2355 tree target;
2356
2357 if (ie->indirect_info->agg_contents)
2358 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2359 ie->indirect_info->offset,
2360 ie->indirect_info->by_ref);
2361 else
2362 target = ipa_value_from_jfunc (new_root_info, jfunc);
2363 if (!target)
2364 return NULL;
2365 cs = ipa_make_edge_direct_to_target (ie, target);
2366
2367 if (cs && !ie->indirect_info->agg_contents
2368 && jfunc->type == IPA_JF_CONST
2369 && (rdesc = jfunc_rdesc_usable (jfunc))
2370 && --rdesc->refcount == 0)
2371 remove_described_reference ((symtab_node) cs->callee, rdesc);
2372
2373 return cs;
2374 }
2375
2376 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2377 call based on a formal parameter which is described by jump function JFUNC;
2378 if it can be determined, make the edge direct and return it.
2379 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2380 are relative to. */
2381
2382 static struct cgraph_edge *
2383 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2384 struct ipa_jump_func *jfunc,
2385 struct ipa_node_params *new_root_info)
2386 {
2387 tree binfo, target;
2388
2389 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2390
2391 if (!binfo)
2392 return NULL;
2393
2394 if (TREE_CODE (binfo) != TREE_BINFO)
2395 {
2396 binfo = gimple_extract_devirt_binfo_from_cst (binfo);
2397 if (!binfo)
2398 return NULL;
2399 }
2400
2401 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2402 ie->indirect_info->otr_type);
2403 if (binfo)
2404 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2405 binfo);
2406 else
2407 return NULL;
2408
2409 if (target)
2410 return ipa_make_edge_direct_to_target (ie, target);
2411 else
2412 return NULL;
2413 }
2414
2415 /* Update the param called notes associated with NODE when CS is being inlined,
2416 assuming NODE is (potentially indirectly) inlined into CS->callee.
2417 Moreover, if the callee is discovered to be constant, create a new cgraph
2418 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2419 unless NEW_EDGES is NULL. Return true iff new edges were created. */
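/* Schematically (a hypothetical example), if the inlined function calls
   through its parameter 3 and the jump function of argument 3 at the
   inlined call site is a simple pass-through of the caller's parameter 1,
   the indirect call becomes a call through the caller's parameter 1 and
   param_index is updated below accordingly; if the jump function instead
   yields a known target, the edge is made direct.  */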
2420
2421 static bool
2422 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2423 struct cgraph_node *node,
2424 vec<cgraph_edge_p> *new_edges)
2425 {
2426 struct ipa_edge_args *top;
2427 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2428 struct ipa_node_params *new_root_info;
2429 bool res = false;
2430
2431 ipa_check_create_edge_args ();
2432 top = IPA_EDGE_REF (cs);
2433 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2434 ? cs->caller->global.inlined_to
2435 : cs->caller);
2436
2437 for (ie = node->indirect_calls; ie; ie = next_ie)
2438 {
2439 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2440 struct ipa_jump_func *jfunc;
2441 int param_index;
2442
2443 next_ie = ie->next_callee;
2444
2445 if (ici->param_index == -1)
2446 continue;
2447
2448 /* We must check the range because of calls with a variable number of arguments. */
2449 if (ici->param_index >= ipa_get_cs_argument_count (top))
2450 {
2451 ici->param_index = -1;
2452 continue;
2453 }
2454
2455 param_index = ici->param_index;
2456 jfunc = ipa_get_ith_jump_func (top, param_index);
2457
2458 if (!flag_indirect_inlining)
2459 new_direct_edge = NULL;
2460 else if (ici->polymorphic)
2461 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2462 new_root_info);
2463 else
2464 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2465 new_root_info);
2466 if (new_direct_edge)
2467 {
2468 new_direct_edge->indirect_inlining_edge = 1;
2469 if (new_direct_edge->call_stmt)
2470 new_direct_edge->call_stmt_cannot_inline_p
2471 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2472 new_direct_edge->callee->symbol.decl);
2473 if (new_edges)
2474 {
2475 new_edges->safe_push (new_direct_edge);
2476 top = IPA_EDGE_REF (cs);
2477 res = true;
2478 }
2479 }
2480 else if (jfunc->type == IPA_JF_PASS_THROUGH
2481 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2482 {
2483 if (ici->agg_contents
2484 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2485 ici->param_index = -1;
2486 else
2487 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2488 }
2489 else if (jfunc->type == IPA_JF_ANCESTOR)
2490 {
2491 if (ici->agg_contents
2492 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2493 ici->param_index = -1;
2494 else
2495 {
2496 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2497 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2498 }
2499 }
2500 else
2501 /* Either we can find a destination for this edge now or never. */
2502 ici->param_index = -1;
2503 }
2504
2505 return res;
2506 }
2507
2508 /* Recursively traverse subtree of NODE (including node) made of inlined
2509 cgraph_edges when CS has been inlined and invoke
2510 update_indirect_edges_after_inlining on all nodes and
2511 update_jump_functions_after_inlining on all non-inlined edges that lead out
2512 of this subtree. Newly discovered indirect edges will be added to
2513 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2514 created. */
2515
2516 static bool
2517 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2518 struct cgraph_node *node,
2519 vec<cgraph_edge_p> *new_edges)
2520 {
2521 struct cgraph_edge *e;
2522 bool res;
2523
2524 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2525
2526 for (e = node->callees; e; e = e->next_callee)
2527 if (!e->inline_failed)
2528 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2529 else
2530 update_jump_functions_after_inlining (cs, e);
2531 for (e = node->indirect_calls; e; e = e->next_callee)
2532 update_jump_functions_after_inlining (cs, e);
2533
2534 return res;
2535 }
2536
2537 /* Combine two controlled uses counts as done during inlining. */
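/* The minus one accounts for the argument at the inlined call site itself:
   that single use disappears and is replaced by the D uses of the
   corresponding parameter in the inlinee.  E.g. (hypothetically) with
   C = 3 uses in the new root, one of them being the inlined call, and
   D = 2 uses in the callee, the combined count is 3 + 2 - 1 = 4.  */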
2538
2539 static int
2540 combine_controlled_uses_counters (int c, int d)
2541 {
2542 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2543 return IPA_UNDESCRIBED_USE;
2544 else
2545 return c + d - 1;
2546 }
2547
2548 /* Propagate the number of controlled uses from CS->callee to the new root
2549 of the tree of inlined nodes. */
2550
2551 static void
2552 propagate_controlled_uses (struct cgraph_edge *cs)
2553 {
2554 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2555 struct cgraph_node *new_root = cs->caller->global.inlined_to
2556 ? cs->caller->global.inlined_to : cs->caller;
2557 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2558 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2559 int count, i;
2560
2561 count = MIN (ipa_get_cs_argument_count (args),
2562 ipa_get_param_count (old_root_info));
2563 for (i = 0; i < count; i++)
2564 {
2565 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2566 struct ipa_cst_ref_desc *rdesc;
2567
2568 if (jf->type == IPA_JF_PASS_THROUGH)
2569 {
2570 int src_idx, c, d;
2571 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2572 c = ipa_get_controlled_uses (new_root_info, src_idx);
2573 d = ipa_get_controlled_uses (old_root_info, i);
2574
2575 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2576 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2577 c = combine_controlled_uses_counters (c, d);
2578 ipa_set_controlled_uses (new_root_info, src_idx, c);
2579 if (c == 0 && new_root_info->ipcp_orig_node)
2580 {
2581 struct cgraph_node *n;
2582 struct ipa_ref *ref;
2583 tree t = new_root_info->known_vals[src_idx];
2584
2585 if (t && TREE_CODE (t) == ADDR_EXPR
2586 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2587 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2588 && (ref = ipa_find_reference ((symtab_node) new_root,
2589 (symtab_node) n, NULL)))
2590 {
2591 if (dump_file)
2592 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2593 "reference from %s/%i to %s/%i.\n",
2594 xstrdup (cgraph_node_name (new_root)),
2595 new_root->symbol.order,
2596 xstrdup (cgraph_node_name (n)), n->symbol.order);
2597 ipa_remove_reference (ref);
2598 }
2599 }
2600 }
2601 else if (jf->type == IPA_JF_CONST
2602 && (rdesc = jfunc_rdesc_usable (jf)))
2603 {
2604 int d = ipa_get_controlled_uses (old_root_info, i);
2605 int c = rdesc->refcount;
2606 rdesc->refcount = combine_controlled_uses_counters (c, d);
2607 if (rdesc->refcount == 0)
2608 {
2609 tree cst = ipa_get_jf_constant (jf);
2610 struct cgraph_node *n;
2611 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2612 && TREE_CODE (TREE_OPERAND (cst, 0))
2613 == FUNCTION_DECL);
2614 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2615 if (n)
2616 {
2617 struct cgraph_node *clone;
2618 remove_described_reference ((symtab_node) n, rdesc);
2619
2620 clone = cs->caller;
2621 while (clone->global.inlined_to
2622 && clone != rdesc->cs->caller
2623 && IPA_NODE_REF (clone)->ipcp_orig_node)
2624 {
2625 struct ipa_ref *ref;
2626 ref = ipa_find_reference ((symtab_node) clone,
2627 (symtab_node) n, NULL);
2628 if (ref)
2629 {
2630 if (dump_file)
2631 fprintf (dump_file, "ipa-prop: Removing "
2632 "cloning-created reference "
2633 "from %s/%i to %s/%i.\n",
2634 xstrdup (cgraph_node_name (clone)),
2635 clone->symbol.order,
2636 xstrdup (cgraph_node_name (n)),
2637 n->symbol.order);
2638 ipa_remove_reference (ref);
2639 }
2640 clone = clone->callers->caller;
2641 }
2642 }
2643 }
2644 }
2645 }
2646
2647 for (i = ipa_get_param_count (old_root_info);
2648 i < ipa_get_cs_argument_count (args);
2649 i++)
2650 {
2651 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2652
2653 if (jf->type == IPA_JF_CONST)
2654 {
2655 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2656 if (rdesc)
2657 rdesc->refcount = IPA_UNDESCRIBED_USE;
2658 }
2659 else if (jf->type == IPA_JF_PASS_THROUGH)
2660 ipa_set_controlled_uses (new_root_info,
2661 jf->value.pass_through.formal_id,
2662 IPA_UNDESCRIBED_USE);
2663 }
2664 }
2665
2666 /* Update jump functions and call note functions on inlining the call site CS.
2667 CS is expected to lead to a node already cloned by
2668 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2669 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2670 created. */
2671
2672 bool
2673 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2674 vec<cgraph_edge_p> *new_edges)
2675 {
2676 bool changed;
2677 /* Do nothing if the preparation phase has not been carried out yet
2678 (i.e. during early inlining). */
2679 if (!ipa_node_params_vector.exists ())
2680 return false;
2681 gcc_assert (ipa_edge_args_vector);
2682
2683 propagate_controlled_uses (cs);
2684 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2685
2686 /* We do not keep jump functions of inlined edges up to date. Better to free
2687 them so we do not access them accidentally. */
2688 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2689 return changed;
2690 }
2691
2692 /* Frees all dynamically allocated structures that the argument info points
2693 to. */
2694
2695 void
2696 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2697 {
2698 vec_free (args->jump_functions);
2699 memset (args, 0, sizeof (*args));
2700 }
2701
2702 /* Free all ipa_edge_args structures. */
2703
2704 void
2705 ipa_free_all_edge_args (void)
2706 {
2707 int i;
2708 struct ipa_edge_args *args;
2709
2710 if (!ipa_edge_args_vector)
2711 return;
2712
2713 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2714 ipa_free_edge_args_substructures (args);
2715
2716 vec_free (ipa_edge_args_vector);
2717 }
2718
2719 /* Frees all dynamically allocated structures that the param info points
2720 to. */
2721
2722 void
2723 ipa_free_node_params_substructures (struct ipa_node_params *info)
2724 {
2725 info->descriptors.release ();
2726 free (info->lattices);
2727 /* Lattice values and their sources are deallocated with their allocation
2728 pool. */
2729 info->known_vals.release ();
2730 memset (info, 0, sizeof (*info));
2731 }
2732
2733 /* Free all ipa_node_params structures. */
2734
2735 void
2736 ipa_free_all_node_params (void)
2737 {
2738 int i;
2739 struct ipa_node_params *info;
2740
2741 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2742 ipa_free_node_params_substructures (info);
2743
2744 ipa_node_params_vector.release ();
2745 }
2746
2747 /* Set the aggregate replacements of NODE to be AGGVALS. */
2748
2749 void
2750 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2751 struct ipa_agg_replacement_value *aggvals)
2752 {
2753 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2754 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2755
2756 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2757 }
2758
2759 /* Hook that is called by cgraph.c when an edge is removed. */
2760
2761 static void
2762 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2763 {
2764 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2765 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
2766 return;
2767 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2768 }
2769
2770 /* Hook that is called by cgraph.c when a node is removed. */
2771
2772 static void
2773 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2774 {
2775 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2776 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2777 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2778 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2779 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
2780 }
2781
2782 /* Hook that is called by cgraph.c when an edge is duplicated. */
2783
2784 static void
2785 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2786 __attribute__((unused)) void *data)
2787 {
2788 struct ipa_edge_args *old_args, *new_args;
2789 unsigned int i;
2790
2791 ipa_check_create_edge_args ();
2792
2793 old_args = IPA_EDGE_REF (src);
2794 new_args = IPA_EDGE_REF (dst);
2795
2796 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
2797
2798 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2799 {
2800 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
2801 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
2802
2803 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
2804
2805 if (src_jf->type == IPA_JF_CONST)
2806 {
2807 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
2808
2809 if (!src_rdesc)
2810 dst_jf->value.constant.rdesc = NULL;
2811 else if (src_rdesc->cs == src)
2812 {
2813 struct ipa_cst_ref_desc *dst_rdesc;
2814 gcc_checking_assert (ipa_refdesc_pool);
2815 dst_rdesc
2816 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
2817 dst_rdesc->cs = dst;
2818 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
2819 src_rdesc->next_duplicate = dst_rdesc;
2820 dst_rdesc->refcount = src_rdesc->refcount;
2821 dst_jf->value.constant.rdesc = dst_rdesc;
2822 }
2823 else
2824 {
2825 struct ipa_cst_ref_desc *dst_rdesc;
2826 /* This can happen during inlining, when a JFUNC can refer to a
2827 reference taken in a function up in the tree of inline clones.
2828 We need to find the duplicate that refers to our tree of
2829 inline clones. */
2830
2831 gcc_assert (dst->caller->global.inlined_to);
2832 for (dst_rdesc = src_rdesc->next_duplicate;
2833 dst_rdesc;
2834 dst_rdesc = dst_rdesc->next_duplicate)
2835 {
2836 gcc_assert (dst_rdesc->cs->caller->global.inlined_to);
2837 if (dst_rdesc->cs->caller->global.inlined_to
2838 == dst->caller->global.inlined_to)
2839 break;
2840 }
2841
2842 dst_jf->value.constant.rdesc = dst_rdesc;
2843 }
2844 }
2845 }
2846 }
2847
2848 /* Hook that is called by cgraph.c when a node is duplicated. */
2849
2850 static void
2851 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2852 ATTRIBUTE_UNUSED void *data)
2853 {
2854 struct ipa_node_params *old_info, *new_info;
2855 struct ipa_agg_replacement_value *old_av, *new_av;
2856
2857 ipa_check_create_node_params ();
2858 old_info = IPA_NODE_REF (src);
2859 new_info = IPA_NODE_REF (dst);
2860
2861 new_info->descriptors = old_info->descriptors.copy ();
2862 new_info->lattices = NULL;
2863 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2864
2865 new_info->uses_analysis_done = old_info->uses_analysis_done;
2866 new_info->node_enqueued = old_info->node_enqueued;
2867
2868 old_av = ipa_get_agg_replacements_for_node (src);
2869 if (!old_av)
2870 return;
2871
2872 new_av = NULL;
2873 while (old_av)
2874 {
2875 struct ipa_agg_replacement_value *v;
2876
2877 v = ggc_alloc_ipa_agg_replacement_value ();
2878 memcpy (v, old_av, sizeof (*v));
2879 v->next = new_av;
2880 new_av = v;
2881 old_av = old_av->next;
2882 }
2883 ipa_set_node_agg_value_chain (dst, new_av);
2884 }
2885
2886
2887 /* Analyze a function newly added into the callgraph. */
2888
2889 static void
2890 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2891 {
2892 ipa_analyze_node (node);
2893 }
2894
2895 /* Register our cgraph hooks if they are not already there. */
2896
2897 void
2898 ipa_register_cgraph_hooks (void)
2899 {
2900 if (!edge_removal_hook_holder)
2901 edge_removal_hook_holder =
2902 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2903 if (!node_removal_hook_holder)
2904 node_removal_hook_holder =
2905 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2906 if (!edge_duplication_hook_holder)
2907 edge_duplication_hook_holder =
2908 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2909 if (!node_duplication_hook_holder)
2910 node_duplication_hook_holder =
2911 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2912 function_insertion_hook_holder =
2913 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2914 }
2915
2916 /* Unregister our cgraph hooks. */
2917
2918 static void
2919 ipa_unregister_cgraph_hooks (void)
2920 {
2921 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2922 edge_removal_hook_holder = NULL;
2923 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2924 node_removal_hook_holder = NULL;
2925 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2926 edge_duplication_hook_holder = NULL;
2927 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2928 node_duplication_hook_holder = NULL;
2929 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2930 function_insertion_hook_holder = NULL;
2931 }
2932
2933 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2934 longer needed after ipa-cp. */
2935
2936 void
2937 ipa_free_all_structures_after_ipa_cp (void)
2938 {
2939 if (!optimize)
2940 {
2941 ipa_free_all_edge_args ();
2942 ipa_free_all_node_params ();
2943 free_alloc_pool (ipcp_sources_pool);
2944 free_alloc_pool (ipcp_values_pool);
2945 free_alloc_pool (ipcp_agg_lattice_pool);
2946 ipa_unregister_cgraph_hooks ();
2947 if (ipa_refdesc_pool)
2948 free_alloc_pool (ipa_refdesc_pool);
2949 }
2950 }
2951
2952 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2953 longer needed after indirect inlining. */
2954
2955 void
2956 ipa_free_all_structures_after_iinln (void)
2957 {
2958 ipa_free_all_edge_args ();
2959 ipa_free_all_node_params ();
2960 ipa_unregister_cgraph_hooks ();
2961 if (ipcp_sources_pool)
2962 free_alloc_pool (ipcp_sources_pool);
2963 if (ipcp_values_pool)
2964 free_alloc_pool (ipcp_values_pool);
2965 if (ipcp_agg_lattice_pool)
2966 free_alloc_pool (ipcp_agg_lattice_pool);
2967 if (ipa_refdesc_pool)
2968 free_alloc_pool (ipa_refdesc_pool);
2969 }
2970
2971 /* Print ipa_tree_map data structures of the function NODE to F. */
2973
2974 void
2975 ipa_print_node_params (FILE *f, struct cgraph_node *node)
2976 {
2977 int i, count;
2978 tree temp;
2979 struct ipa_node_params *info;
2980
2981 if (!node->symbol.definition)
2982 return;
2983 info = IPA_NODE_REF (node);
2984 fprintf (f, " function %s/%i parameter descriptors:\n",
2985 cgraph_node_name (node), node->symbol.order);
2986 count = ipa_get_param_count (info);
2987 for (i = 0; i < count; i++)
2988 {
2989 int c;
2990
2991 temp = ipa_get_param (info, i);
2992 if (TREE_CODE (temp) == PARM_DECL)
2993 fprintf (f, " param %d : %s", i,
2994 (DECL_NAME (temp)
2995 ? (*lang_hooks.decl_printable_name) (temp, 2)
2996 : "(unnamed)"));
2997 if (ipa_is_param_used (info, i))
2998 fprintf (f, " used");
2999 c = ipa_get_controlled_uses (info, i);
3000 if (c == IPA_UNDESCRIBED_USE)
3001 fprintf (f, " undescribed_use");
3002 else
3003 fprintf (f, " controlled_uses=%i", c);
3004 fprintf (f, "\n");
3005 }
3006 }
3007
3008 /* Print ipa_tree_map data structures of all functions in the
3009 callgraph to F. */
3010
3011 void
3012 ipa_print_all_params (FILE * f)
3013 {
3014 struct cgraph_node *node;
3015
3016 fprintf (f, "\nFunction parameters:\n");
3017 FOR_EACH_FUNCTION (node)
3018 ipa_print_node_params (f, node);
3019 }
3020
3021 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3022
3023 vec<tree>
3024 ipa_get_vector_of_formal_parms (tree fndecl)
3025 {
3026 vec<tree> args;
3027 int count;
3028 tree parm;
3029
3030 count = count_formal_params (fndecl);
3031 args.create (count);
3032 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3033 args.quick_push (parm);
3034
3035 return args;
3036 }
3037
3038 /* Return a heap-allocated vector containing the types of formal parameters of
3039 function type FNTYPE. */
3040
3041 static inline vec<tree>
3042 get_vector_of_formal_parm_types (tree fntype)
3043 {
3044 vec<tree> types;
3045 int count = 0;
3046 tree t;
3047
3048 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3049 count++;
3050
3051 types.create (count);
3052 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3053 types.quick_push (TREE_VALUE (t));
3054
3055 return types;
3056 }
3057
3058 /* Modify the function declaration FNDECL and its type according to the plan in
3059 ADJUSTMENTS. It also sets the base fields of individual adjustment
3060 structures to reflect the actual parameters being modified, which are
3061 determined by their base_index fields. */
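/* As a hypothetical illustration (names made up), an adjustment vector
   that copies parameter 0 and replaces parameter 1 with a synthesized
   scalar holding the only used field would turn

     void f (int a, struct S *p);

   into

     void f (int a, int SYNTH);

   where the second entry has copy_param and remove_param clear and its
   type set to the scalar type.  */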
3062
3063 void
3064 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3065 const char *synth_parm_prefix)
3066 {
3067 vec<tree> oparms, otypes;
3068 tree orig_type, new_type = NULL;
3069 tree old_arg_types, t, new_arg_types = NULL;
3070 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3071 int i, len = adjustments.length ();
3072 tree new_reversed = NULL;
3073 bool care_for_types, last_parm_void;
3074
3075 if (!synth_parm_prefix)
3076 synth_parm_prefix = "SYNTH";
3077
3078 oparms = ipa_get_vector_of_formal_parms (fndecl);
3079 orig_type = TREE_TYPE (fndecl);
3080 old_arg_types = TYPE_ARG_TYPES (orig_type);
3081
3082 /* The following test is an ugly hack; some functions simply don't have any
3083 arguments in their type. This is probably a bug but well... */
3084 care_for_types = (old_arg_types != NULL_TREE);
3085 if (care_for_types)
3086 {
3087 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3088 == void_type_node);
3089 otypes = get_vector_of_formal_parm_types (orig_type);
3090 if (last_parm_void)
3091 gcc_assert (oparms.length () + 1 == otypes.length ());
3092 else
3093 gcc_assert (oparms.length () == otypes.length ());
3094 }
3095 else
3096 {
3097 last_parm_void = false;
3098 otypes.create (0);
3099 }
3100
3101 for (i = 0; i < len; i++)
3102 {
3103 struct ipa_parm_adjustment *adj;
3104 gcc_assert (link);
3105
3106 adj = &adjustments[i];
3107 parm = oparms[adj->base_index];
3108 adj->base = parm;
3109
3110 if (adj->copy_param)
3111 {
3112 if (care_for_types)
3113 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3114 new_arg_types);
3115 *link = parm;
3116 link = &DECL_CHAIN (parm);
3117 }
3118 else if (!adj->remove_param)
3119 {
3120 tree new_parm;
3121 tree ptype;
3122
3123 if (adj->by_ref)
3124 ptype = build_pointer_type (adj->type);
3125 else
3126 ptype = adj->type;
3127
3128 if (care_for_types)
3129 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3130
3131 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3132 ptype);
3133 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3134
3135 DECL_ARTIFICIAL (new_parm) = 1;
3136 DECL_ARG_TYPE (new_parm) = ptype;
3137 DECL_CONTEXT (new_parm) = fndecl;
3138 TREE_USED (new_parm) = 1;
3139 DECL_IGNORED_P (new_parm) = 1;
3140 layout_decl (new_parm, 0);
3141
3142 adj->base = parm;
3143 adj->reduction = new_parm;
3144
3145 *link = new_parm;
3146
3147 link = &DECL_CHAIN (new_parm);
3148 }
3149 }
3150
3151 *link = NULL_TREE;
3152
3153 if (care_for_types)
3154 {
3155 new_reversed = nreverse (new_arg_types);
3156 if (last_parm_void)
3157 {
3158 if (new_reversed)
3159 TREE_CHAIN (new_arg_types) = void_list_node;
3160 else
3161 new_reversed = void_list_node;
3162 }
3163 }
3164
3165 /* Use copy_node to preserve as much as possible from the original type
3166 (debug info, attribute lists etc.).
3167 The exception is METHOD_TYPEs, which must have a THIS argument.
3168 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3169 instead. */
3170 if (TREE_CODE (orig_type) != METHOD_TYPE
3171 || (adjustments[0].copy_param
3172 && adjustments[0].base_index == 0))
3173 {
3174 new_type = build_distinct_type_copy (orig_type);
3175 TYPE_ARG_TYPES (new_type) = new_reversed;
3176 }
3177 else
3178 {
3179 new_type
3180 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3181 new_reversed));
3182 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3183 DECL_VINDEX (fndecl) = NULL_TREE;
3184 }
3185
3186 /* When the signature changes, we need to clear builtin info. */
3187 if (DECL_BUILT_IN (fndecl))
3188 {
3189 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3190 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3191 }
3192
3193 /* This is a new type, not a copy of an old type. Need to reassociate
3194 variants. We can handle everything except the main variant lazily. */
3195 t = TYPE_MAIN_VARIANT (orig_type);
3196 if (orig_type != t)
3197 {
3198 TYPE_MAIN_VARIANT (new_type) = t;
3199 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3200 TYPE_NEXT_VARIANT (t) = new_type;
3201 }
3202 else
3203 {
3204 TYPE_MAIN_VARIANT (new_type) = new_type;
3205 TYPE_NEXT_VARIANT (new_type) = NULL;
3206 }
3207
3208 TREE_TYPE (fndecl) = new_type;
3209 DECL_VIRTUAL_P (fndecl) = 0;
3210 otypes.release ();
3211 oparms.release ();
3212 }
3213
3214 /* Modify the actual arguments of the function call statement STMT as
3215 indicated in ADJUSTMENTS. If this is a directly recursive call, CS must
3216 be NULL. Otherwise it must contain the corresponding call graph edge. */
3217
3218 void
3219 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3220 ipa_parm_adjustment_vec adjustments)
3221 {
3222 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3223 vec<tree> vargs;
3224 vec<tree, va_gc> **debug_args = NULL;
3225 gimple new_stmt;
3226 gimple_stmt_iterator gsi, prev_gsi;
3227 tree callee_decl;
3228 int i, len;
3229
3230 len = adjustments.length ();
3231 vargs.create (len);
3232 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
3233 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
3234
3235 gsi = gsi_for_stmt (stmt);
3236 prev_gsi = gsi;
3237 gsi_prev (&prev_gsi);
3238 for (i = 0; i < len; i++)
3239 {
3240 struct ipa_parm_adjustment *adj;
3241
3242 adj = &adjustments[i];
3243
3244 if (adj->copy_param)
3245 {
3246 tree arg = gimple_call_arg (stmt, adj->base_index);
3247
3248 vargs.quick_push (arg);
3249 }
3250 else if (!adj->remove_param)
3251 {
3252 tree expr, base, off;
3253 location_t loc;
3254 unsigned int deref_align;
3255 bool deref_base = false;
3256
3257 /* Since we create a new parameter out of the value of the old one, we
3258 can do the following kinds of transformations:
3259
3260 - A scalar passed by reference is converted to a scalar passed by
3261 value. (adj->by_ref is false and the type of the original
3262 actual argument is a pointer to a scalar).
3263
3264 - A part of an aggregate is passed instead of the whole aggregate.
3265 The part can be passed either by value or by reference, this is
3266 determined by value of adj->by_ref. Moreover, the code below
3267 handles both situations when the original aggregate is passed by
3268 value (its type is not a pointer) and when it is passed by
3269 reference (it is a pointer to an aggregate).
3270
3271 When the new argument is passed by reference (adj->by_ref is true)
3272 it must be a part of an aggregate and therefore we form it by
3273 simply taking the address of a reference inside the original
3274 aggregate. */
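/* For instance (a hypothetical example), when a call f (i, &s) is
   adjusted so that only the scalar field s.f is passed by value, the code
   below combines the base and the recorded offset into a MEM_REF that
   loads s.f with a conservatively computed alignment and pushes that
   expression instead of &s.  */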
3275
3276 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3277 base = gimple_call_arg (stmt, adj->base_index);
3278 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3279 : EXPR_LOCATION (base);
3280
3281 if (TREE_CODE (base) != ADDR_EXPR
3282 && POINTER_TYPE_P (TREE_TYPE (base)))
3283 off = build_int_cst (adj->alias_ptr_type,
3284 adj->offset / BITS_PER_UNIT);
3285 else
3286 {
3287 HOST_WIDE_INT base_offset;
3288 tree prev_base;
3289 bool addrof;
3290
3291 if (TREE_CODE (base) == ADDR_EXPR)
3292 {
3293 base = TREE_OPERAND (base, 0);
3294 addrof = true;
3295 }
3296 else
3297 addrof = false;
3298 prev_base = base;
3299 base = get_addr_base_and_unit_offset (base, &base_offset);
3300 /* Aggregate arguments can have non-invariant addresses. */
3301 if (!base)
3302 {
3303 base = build_fold_addr_expr (prev_base);
3304 off = build_int_cst (adj->alias_ptr_type,
3305 adj->offset / BITS_PER_UNIT);
3306 }
3307 else if (TREE_CODE (base) == MEM_REF)
3308 {
3309 if (!addrof)
3310 {
3311 deref_base = true;
3312 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3313 }
3314 off = build_int_cst (adj->alias_ptr_type,
3315 base_offset
3316 + adj->offset / BITS_PER_UNIT);
3317 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3318 off);
3319 base = TREE_OPERAND (base, 0);
3320 }
3321 else
3322 {
3323 off = build_int_cst (adj->alias_ptr_type,
3324 base_offset
3325 + adj->offset / BITS_PER_UNIT);
3326 base = build_fold_addr_expr (base);
3327 }
3328 }
3329
3330 if (!adj->by_ref)
3331 {
3332 tree type = adj->type;
3333 unsigned int align;
3334 unsigned HOST_WIDE_INT misalign;
3335
3336 if (deref_base)
3337 {
3338 align = deref_align;
3339 misalign = 0;
3340 }
3341 else
3342 {
3343 get_pointer_alignment_1 (base, &align, &misalign);
3344 if (TYPE_ALIGN (type) > align)
3345 align = TYPE_ALIGN (type);
3346 }
3347 misalign += (tree_to_double_int (off)
3348 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3349 * BITS_PER_UNIT);
3350 misalign = misalign & (align - 1);
3351 if (misalign != 0)
3352 align = (misalign & -misalign);
3353 if (align < TYPE_ALIGN (type))
3354 type = build_aligned_type (type, align);
3355 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3356 }
3357 else
3358 {
3359 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3360 expr = build_fold_addr_expr (expr);
3361 }
3362
3363 expr = force_gimple_operand_gsi (&gsi, expr,
3364 adj->by_ref
3365 || is_gimple_reg_type (adj->type),
3366 NULL, true, GSI_SAME_STMT);
3367 vargs.quick_push (expr);
3368 }
3369 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3370 {
3371 unsigned int ix;
3372 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3373 gimple def_temp;
3374
3375 arg = gimple_call_arg (stmt, adj->base_index);
3376 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3377 {
3378 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3379 continue;
3380 arg = fold_convert_loc (gimple_location (stmt),
3381 TREE_TYPE (origin), arg);
3382 }
3383 if (debug_args == NULL)
3384 debug_args = decl_debug_args_insert (callee_decl);
3385 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3386 if (ddecl == origin)
3387 {
3388 ddecl = (**debug_args)[ix + 1];
3389 break;
3390 }
3391 if (ddecl == NULL)
3392 {
3393 ddecl = make_node (DEBUG_EXPR_DECL);
3394 DECL_ARTIFICIAL (ddecl) = 1;
3395 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3396 DECL_MODE (ddecl) = DECL_MODE (origin);
3397
3398 vec_safe_push (*debug_args, origin);
3399 vec_safe_push (*debug_args, ddecl);
3400 }
3401 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3402 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3403 }
3404 }
3405
3406 if (dump_file && (dump_flags & TDF_DETAILS))
3407 {
3408 fprintf (dump_file, "replacing stmt:");
3409 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3410 }
3411
3412 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3413 vargs.release ();
3414 if (gimple_call_lhs (stmt))
3415 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3416
3417 gimple_set_block (new_stmt, gimple_block (stmt));
3418 if (gimple_has_location (stmt))
3419 gimple_set_location (new_stmt, gimple_location (stmt));
3420 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3421 gimple_call_copy_flags (new_stmt, stmt);
3422
3423 if (dump_file && (dump_flags & TDF_DETAILS))
3424 {
3425 fprintf (dump_file, "with stmt:");
3426 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3427 fprintf (dump_file, "\n");
3428 }
3429 gsi_replace (&gsi, new_stmt, true);
3430 if (cs)
3431 cgraph_set_call_stmt (cs, new_stmt);
3432 do
3433 {
3434 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3435 gsi_prev (&gsi);
3436 }
3437 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3438 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3439
3440 update_ssa (TODO_update_ssa);
3441 free_dominance_info (CDI_DOMINATORS);
3442 }
3443
3444 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3445
3446 static bool
3447 index_in_adjustments_multiple_times_p (int base_index,
3448 ipa_parm_adjustment_vec adjustments)
3449 {
3450 int i, len = adjustments.length ();
3451 bool one = false;
3452
3453 for (i = 0; i < len; i++)
3454 {
3455 struct ipa_parm_adjustment *adj;
3456 adj = &adjustments[i];
3457
3458 if (adj->base_index == base_index)
3459 {
3460 if (one)
3461 return true;
3462 else
3463 one = true;
3464 }
3465 }
3466 return false;
3467 }
3468
3469
3470 /* Return adjustments that should have the same effect on function parameters
3471 and call arguments as if they were first changed according to adjustments in
3472 INNER and then by adjustments in OUTER. */
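/* For example (hypothetically), if INNER keeps parameters 0 and 2 of an
   original three and removes parameter 1, and OUTER then removes its
   parameter 0, the combination copies only original parameter 2 and
   records the removals of original parameters 0 and 1.  */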
3473
3474 ipa_parm_adjustment_vec
3475 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3476 ipa_parm_adjustment_vec outer)
3477 {
3478 int i, outlen = outer.length ();
3479 int inlen = inner.length ();
3480 int removals = 0;
3481 ipa_parm_adjustment_vec adjustments, tmp;
3482
3483 tmp.create (inlen);
3484 for (i = 0; i < inlen; i++)
3485 {
3486 struct ipa_parm_adjustment *n;
3487 n = &inner[i];
3488
3489 if (n->remove_param)
3490 removals++;
3491 else
3492 tmp.quick_push (*n);
3493 }
3494
3495 adjustments.create (outlen + removals);
3496 for (i = 0; i < outlen; i++)
3497 {
3498 struct ipa_parm_adjustment r;
3499 struct ipa_parm_adjustment *out = &outer[i];
3500 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3501
3502 memset (&r, 0, sizeof (r));
3503 gcc_assert (!in->remove_param);
3504 if (out->remove_param)
3505 {
3506 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3507 {
3508 r.remove_param = true;
3509 adjustments.quick_push (r);
3510 }
3511 continue;
3512 }
3513
3514 r.base_index = in->base_index;
3515 r.type = out->type;
3516
3517 /* FIXME: Create nonlocal value too. */
3518
3519 if (in->copy_param && out->copy_param)
3520 r.copy_param = true;
3521 else if (in->copy_param)
3522 r.offset = out->offset;
3523 else if (out->copy_param)
3524 r.offset = in->offset;
3525 else
3526 r.offset = in->offset + out->offset;
3527 adjustments.quick_push (r);
3528 }
3529
3530 for (i = 0; i < inlen; i++)
3531 {
3532 struct ipa_parm_adjustment *n = &inner[i];
3533
3534 if (n->remove_param)
3535 adjustments.quick_push (*n);
3536 }
3537
3538 tmp.release ();
3539 return adjustments;
3540 }
3541
3542 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
3543 way, assuming they are meant to be applied to FNDECL. */
3544
3545 void
3546 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3547 tree fndecl)
3548 {
3549 int i, len = adjustments.length ();
3550 bool first = true;
3551 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3552
3553 fprintf (file, "IPA param adjustments: ");
3554 for (i = 0; i < len; i++)
3555 {
3556 struct ipa_parm_adjustment *adj;
3557 adj = &adjustments[i];
3558
3559 if (!first)
3560 fprintf (file, " ");
3561 else
3562 first = false;
3563
3564 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3565 print_generic_expr (file, parms[adj->base_index], 0);
3566 if (adj->base)
3567 {
3568 fprintf (file, ", base: ");
3569 print_generic_expr (file, adj->base, 0);
3570 }
3571 if (adj->reduction)
3572 {
3573 fprintf (file, ", reduction: ");
3574 print_generic_expr (file, adj->reduction, 0);
3575 }
3576 if (adj->new_ssa_base)
3577 {
3578 fprintf (file, ", new_ssa_base: ");
3579 print_generic_expr (file, adj->new_ssa_base, 0);
3580 }
3581
3582 if (adj->copy_param)
3583 fprintf (file, ", copy_param");
3584 else if (adj->remove_param)
3585 fprintf (file, ", remove_param");
3586 else
3587 fprintf (file, ", offset %li", (long) adj->offset);
3588 if (adj->by_ref)
3589 fprintf (file, ", by_ref");
3590 print_node_brief (file, ", type: ", adj->type, 0);
3591 fprintf (file, "\n");
3592 }
3593 parms.release ();
3594 }
3595
3596 /* Dump the AV linked list. */
3597
3598 void
3599 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3600 {
3601 bool comma = false;
3602 fprintf (f, " Aggregate replacements:");
3603 for (; av; av = av->next)
3604 {
3605 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3606 av->index, av->offset);
3607 print_generic_expr (f, av->value, 0);
3608 comma = true;
3609 }
3610 fprintf (f, "\n");
3611 }
3612
3613 /* Stream out jump function JUMP_FUNC to OB. */
3614
3615 static void
3616 ipa_write_jump_function (struct output_block *ob,
3617 struct ipa_jump_func *jump_func)
3618 {
3619 struct ipa_agg_jf_item *item;
3620 struct bitpack_d bp;
3621 int i, count;
3622
3623 streamer_write_uhwi (ob, jump_func->type);
3624 switch (jump_func->type)
3625 {
3626 case IPA_JF_UNKNOWN:
3627 break;
3628 case IPA_JF_KNOWN_TYPE:
3629 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3630 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3631 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3632 break;
3633 case IPA_JF_CONST:
3634 gcc_assert (EXPR_LOCATION (jump_func->value.constant.value)
3635 == UNKNOWN_LOCATION);
3636 stream_write_tree (ob, jump_func->value.constant.value, true);
3637 break;
3638 case IPA_JF_PASS_THROUGH:
3639 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3640 if (jump_func->value.pass_through.operation == NOP_EXPR)
3641 {
3642 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3643 bp = bitpack_create (ob->main_stream);
3644 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3645 streamer_write_bitpack (&bp);
3646 }
3647 else
3648 {
3649 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3650 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3651 }
3652 break;
3653 case IPA_JF_ANCESTOR:
3654 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3655 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3656 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3657 bp = bitpack_create (ob->main_stream);
3658 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3659 streamer_write_bitpack (&bp);
3660 break;
3661 }
3662
3663 count = vec_safe_length (jump_func->agg.items);
3664 streamer_write_uhwi (ob, count);
3665 if (count)
3666 {
3667 bp = bitpack_create (ob->main_stream);
3668 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3669 streamer_write_bitpack (&bp);
3670 }
3671
3672 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3673 {
3674 streamer_write_uhwi (ob, item->offset);
3675 stream_write_tree (ob, item->value, true);
3676 }
3677 }
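
/* The resulting on-disk layout of one jump function, as implied by the
   code above rather than documented elsewhere, is

     uhwi  type
     type-specific payload         (see the switch above)
     uhwi  count of aggregate items
     bitpack <by_ref:1>            (only if count is nonzero)
     count * { uhwi offset; tree value; }

   ipa_read_jump_function below must consume the fields in exactly this
   order.  */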
3678
3679 /* Read in jump function JUMP_FUNC from IB; CS is the call graph edge it belongs to and DATA_IN is used to read in trees. */
3680
3681 static void
3682 ipa_read_jump_function (struct lto_input_block *ib,
3683 struct ipa_jump_func *jump_func,
3684 struct cgraph_edge *cs,
3685 struct data_in *data_in)
3686 {
3687 enum jump_func_type jftype;
3688 enum tree_code operation;
3689 int i, count;
3690
3691 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3692 switch (jftype)
3693 {
3694 case IPA_JF_UNKNOWN:
3695 jump_func->type = IPA_JF_UNKNOWN;
3696 break;
3697 case IPA_JF_KNOWN_TYPE:
3698 {
3699 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3700 tree base_type = stream_read_tree (ib, data_in);
3701 tree component_type = stream_read_tree (ib, data_in);
3702
3703 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
3704 break;
3705 }
3706 case IPA_JF_CONST:
3707 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
3708 break;
3709 case IPA_JF_PASS_THROUGH:
3710 operation = (enum tree_code) streamer_read_uhwi (ib);
3711 if (operation == NOP_EXPR)
3712 {
3713 int formal_id = streamer_read_uhwi (ib);
3714 struct bitpack_d bp = streamer_read_bitpack (ib);
3715 bool agg_preserved = bp_unpack_value (&bp, 1);
3716 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
3717 }
3718 else
3719 {
3720 tree operand = stream_read_tree (ib, data_in);
3721 int formal_id = streamer_read_uhwi (ib);
3722 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
3723 operation);
3724 }
3725 break;
3726 case IPA_JF_ANCESTOR:
3727 {
3728 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3729 tree type = stream_read_tree (ib, data_in);
3730 int formal_id = streamer_read_uhwi (ib);
3731 struct bitpack_d bp = streamer_read_bitpack (ib);
3732 bool agg_preserved = bp_unpack_value (&bp, 1);
3733
3734 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved);
3735 break;
3736 }
3737 }
3738
3739 count = streamer_read_uhwi (ib);
3740 vec_alloc (jump_func->agg.items, count);
3741 if (count)
3742 {
3743 struct bitpack_d bp = streamer_read_bitpack (ib);
3744 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3745 }
3746 for (i = 0; i < count; i++)
3747 {
3748 struct ipa_agg_jf_item item;
3749 item.offset = streamer_read_uhwi (ib);
3750 item.value = stream_read_tree (ib, data_in);
3751 jump_func->agg.items->quick_push (item);
3752 }
3753 }
3754
3755 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to
3756 CS that are relevant to indirect inlining. */
3757
3758 static void
3759 ipa_write_indirect_edge_info (struct output_block *ob,
3760 struct cgraph_edge *cs)
3761 {
3762 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3763 struct bitpack_d bp;
3764
3765 streamer_write_hwi (ob, ii->param_index);
3766 streamer_write_hwi (ob, ii->offset);
3767 bp = bitpack_create (ob->main_stream);
3768 bp_pack_value (&bp, ii->polymorphic, 1);
3769 bp_pack_value (&bp, ii->agg_contents, 1);
3770 bp_pack_value (&bp, ii->by_ref, 1);
3771 streamer_write_bitpack (&bp);
3772
3773 if (ii->polymorphic)
3774 {
3775 streamer_write_hwi (ob, ii->otr_token);
3776 stream_write_tree (ob, ii->otr_type, true);
3777 }
3778 }
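
/* The record written above is, in order:

     hwi  param_index
     hwi  offset
     bitpack <polymorphic:1, agg_contents:1, by_ref:1>
     hwi  otr_token and tree otr_type, only if polymorphic

   ipa_read_indirect_edge_info below mirrors this layout.  */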
3779
3780 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to
3781 CS that are relevant to indirect inlining. */
3782
3783 static void
3784 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3785 struct data_in *data_in,
3786 struct cgraph_edge *cs)
3787 {
3788 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3789 struct bitpack_d bp;
3790
3791 ii->param_index = (int) streamer_read_hwi (ib);
3792 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3793 bp = streamer_read_bitpack (ib);
3794 ii->polymorphic = bp_unpack_value (&bp, 1);
3795 ii->agg_contents = bp_unpack_value (&bp, 1);
3796 ii->by_ref = bp_unpack_value (&bp, 1);
3797 if (ii->polymorphic)
3798 {
3799 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3800 ii->otr_type = stream_read_tree (ib, data_in);
3801 }
3802 }
3803
3804 /* Stream out NODE info to OB. */
3805
3806 static void
3807 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3808 {
3809 int node_ref;
3810 lto_symtab_encoder_t encoder;
3811 struct ipa_node_params *info = IPA_NODE_REF (node);
3812 int j;
3813 struct cgraph_edge *e;
3814 struct bitpack_d bp;
3815
3816 encoder = ob->decl_state->symtab_node_encoder;
3817 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3818 streamer_write_uhwi (ob, node_ref);
3819
3820 bp = bitpack_create (ob->main_stream);
3821 gcc_assert (info->uses_analysis_done
3822 || ipa_get_param_count (info) == 0);
3823 gcc_assert (!info->node_enqueued);
3824 gcc_assert (!info->ipcp_orig_node);
3825 for (j = 0; j < ipa_get_param_count (info); j++)
3826 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3827 streamer_write_bitpack (&bp);
3828 for (j = 0; j < ipa_get_param_count (info); j++)
3829 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
3830 for (e = node->callees; e; e = e->next_callee)
3831 {
3832 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3833
3834 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3835 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3836 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3837 }
3838 for (e = node->indirect_calls; e; e = e->next_callee)
3839 {
3840 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3841
3842 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3843 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3844 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3845 ipa_write_indirect_edge_info (ob, e);
3846 }
3847 }
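
/* Summarizing the writer above, a per-node record consists of

     uhwi  reference of the node in the symtab encoder
     bitpack with one "used" bit per formal parameter
     hwi   controlled uses count per formal parameter
     for each direct callee: uhwi argument count, then that many jump
       functions
     for each indirect callee: the same, followed by the indirect edge
       info

   The edge walk order (callees first, then indirect_calls) must match
   ipa_read_node_info.  */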
3848
3849 /* Stream in NODE info from IB. */
3850
3851 static void
3852 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3853 struct data_in *data_in)
3854 {
3855 struct ipa_node_params *info = IPA_NODE_REF (node);
3856 int k;
3857 struct cgraph_edge *e;
3858 struct bitpack_d bp;
3859
3860 ipa_initialize_node_params (node);
3861
3862 bp = streamer_read_bitpack (ib);
3863 if (ipa_get_param_count (info) != 0)
3864 info->uses_analysis_done = true;
3865 info->node_enqueued = false;
3866 for (k = 0; k < ipa_get_param_count (info); k++)
3867 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3868 for (k = 0; k < ipa_get_param_count (info); k++)
3869 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
3870 for (e = node->callees; e; e = e->next_callee)
3871 {
3872 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3873 int count = streamer_read_uhwi (ib);
3874
3875 if (!count)
3876 continue;
3877 vec_safe_grow_cleared (args->jump_functions, count);
3878
3879 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3880 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
3881 data_in);
3882 }
3883 for (e = node->indirect_calls; e; e = e->next_callee)
3884 {
3885 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3886 int count = streamer_read_uhwi (ib);
3887
3888 if (count)
3889 {
3890 vec_safe_grow_cleared (args->jump_functions, count);
3891 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3892 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
3893 data_in);
3894 }
3895 ipa_read_indirect_edge_info (ib, data_in, e);
3896 }
3897 }
3898
3899 /* Write jump functions of all analyzed nodes in the current LTO partition. */
3900
3901 void
3902 ipa_prop_write_jump_functions (void)
3903 {
3904 struct cgraph_node *node;
3905 struct output_block *ob;
3906 unsigned int count = 0;
3907 lto_symtab_encoder_iterator lsei;
3908 lto_symtab_encoder_t encoder;
3909
3911 if (!ipa_node_params_vector.exists ())
3912 return;
3913
3914 ob = create_output_block (LTO_section_jump_functions);
3915 encoder = ob->decl_state->symtab_node_encoder;
3916 ob->cgraph_node = NULL;
3917 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3918 lsei_next_function_in_partition (&lsei))
3919 {
3920 node = lsei_cgraph_node (lsei);
3921 if (cgraph_function_with_gimple_body_p (node)
3922 && IPA_NODE_REF (node) != NULL)
3923 count++;
3924 }
3925
3926 streamer_write_uhwi (ob, count);
3927
3928 /* Process all of the functions. */
3929 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3930 lsei_next_function_in_partition (&lsei))
3931 {
3932 node = lsei_cgraph_node (lsei);
3933 if (cgraph_function_with_gimple_body_p (node)
3934 && IPA_NODE_REF (node) != NULL)
3935 ipa_write_node_info (ob, node);
3936 }
3937 streamer_write_char_stream (ob->main_stream, 0);
3938 produce_asm (ob, NULL);
3939 destroy_output_block (ob);
3940 }
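
/* A whole LTO_section_jump_functions section therefore consists of a
   uhwi node count, that many per-node records, and a terminating zero
   byte, wrapped in the usual LTO section header by produce_asm (a
   summary inferred from the writer above and the reader below).  */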
3941
3942 /* Read the jump functions section in file FILE_DATA of length LEN with data DATA. */
3943
3944 static void
3945 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3946 size_t len)
3947 {
3948 const struct lto_function_header *header =
3949 (const struct lto_function_header *) data;
3950 const int cfg_offset = sizeof (struct lto_function_header);
3951 const int main_offset = cfg_offset + header->cfg_size;
3952 const int string_offset = main_offset + header->main_size;
3953 struct data_in *data_in;
3954 struct lto_input_block ib_main;
3955 unsigned int i;
3956 unsigned int count;
3957
3958 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3959 header->main_size);
3960
3961 data_in =
3962 lto_data_in_create (file_data, (const char *) data + string_offset,
3963 header->string_size, vNULL);
3964 count = streamer_read_uhwi (&ib_main);
3965
3966 for (i = 0; i < count; i++)
3967 {
3968 unsigned int index;
3969 struct cgraph_node *node;
3970 lto_symtab_encoder_t encoder;
3971
3972 index = streamer_read_uhwi (&ib_main);
3973 encoder = file_data->symtab_node_encoder;
3974 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3975 gcc_assert (node->symbol.definition);
3976 ipa_read_node_info (&ib_main, node, data_in);
3977 }
3978 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3979 len);
3980 lto_data_in_delete (data_in);
3981 }
3982
3983 /* Read IPA-CP jump functions from all LTO input files. */
3984
3985 void
3986 ipa_prop_read_jump_functions (void)
3987 {
3988 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3989 struct lto_file_decl_data *file_data;
3990 unsigned int j = 0;
3991
3992 ipa_check_create_node_params ();
3993 ipa_check_create_edge_args ();
3994 ipa_register_cgraph_hooks ();
3995
3996 while ((file_data = file_data_vec[j++]))
3997 {
3998 size_t len;
3999 const char *data = lto_get_section_data (file_data,
LTO_section_jump_functions,
NULL, &len);
4000
4001 if (data)
4002 ipa_prop_read_section (file_data, data, len);
4003 }
4004 }
4005
4006 /* After merging units, we can get a mismatch in argument counts, and decl
4007 merging might have rendered the recorded parameter lists obsolete, so
4008 re-initialize the parameter information for every defined function. */
4009
4010 void
4011 ipa_update_after_lto_read (void)
4012 {
4013 struct cgraph_node *node;
4014
4015 ipa_check_create_node_params ();
4016 ipa_check_create_edge_args ();
4017
4018 FOR_EACH_DEFINED_FUNCTION (node)
4019 ipa_initialize_node_params (node);
4020 }
4021
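/* Write the aggregate value replacement chain of NODE to OB.  */
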
4022 static void
4023 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4024 {
4025 int node_ref;
4026 unsigned int count = 0;
4027 lto_symtab_encoder_t encoder;
4028 struct ipa_agg_replacement_value *aggvals, *av;
4029
4030 aggvals = ipa_get_agg_replacements_for_node (node);
4031 encoder = ob->decl_state->symtab_node_encoder;
4032 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4033 streamer_write_uhwi (ob, node_ref);
4034
4035 for (av = aggvals; av; av = av->next)
4036 count++;
4037 streamer_write_uhwi (ob, count);
4038
4039 for (av = aggvals; av; av = av->next)
4040 {
4041 struct bitpack_d bp;
4042
4043 streamer_write_uhwi (ob, av->offset);
4044 streamer_write_uhwi (ob, av->index);
4045 stream_write_tree (ob, av->value, true);
4046
4047 bp = bitpack_create (ob->main_stream);
4048 bp_pack_value (&bp, av->by_ref, 1);
4049 streamer_write_bitpack (&bp);
4050 }
4051 }
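
/* Each replacement value is thus streamed as { uhwi offset; uhwi index;
   tree value; bitpack <by_ref:1> }, preceded by the node reference and
   the chain length; read_agg_replacement_chain below relies on this
   exact order.  */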
4052
4053 /* Stream in the aggregate value replacement chain for NODE from IB. */
4054
4055 static void
4056 read_agg_replacement_chain (struct lto_input_block *ib,
4057 struct cgraph_node *node,
4058 struct data_in *data_in)
4059 {
4060 struct ipa_agg_replacement_value *aggvals = NULL;
4061 unsigned int count, i;
4062
4063 count = streamer_read_uhwi (ib);
4064 for (i = 0; i < count; i++)
4065 {
4066 struct ipa_agg_replacement_value *av;
4067 struct bitpack_d bp;
4068
4069 av = ggc_alloc_ipa_agg_replacement_value ();
4070 av->offset = streamer_read_uhwi (ib);
4071 av->index = streamer_read_uhwi (ib);
4072 av->value = stream_read_tree (ib, data_in);
4073 bp = streamer_read_bitpack (ib);
4074 av->by_ref = bp_unpack_value (&bp, 1);
4075 av->next = aggvals;
4076 aggvals = av;
4077 }
4078 ipa_set_node_agg_value_chain (node, aggvals);
4079 }
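
/* Note that the prepending above reverses the streamed order of the
   chain.  The consumers (e.g. the lookup loop in ipcp_transform_function)
   always scan the whole list, so the order does not matter.  */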
4080
4081 /* Write all aggregate replacement chains for nodes in the current LTO partition. */
4082
4083 void
4084 ipa_prop_write_all_agg_replacement (void)
4085 {
4086 struct cgraph_node *node;
4087 struct output_block *ob;
4088 unsigned int count = 0;
4089 lto_symtab_encoder_iterator lsei;
4090 lto_symtab_encoder_t encoder;
4091
4092 if (!ipa_node_agg_replacements)
4093 return;
4094
4095 ob = create_output_block (LTO_section_ipcp_transform);
4096 encoder = ob->decl_state->symtab_node_encoder;
4097 ob->cgraph_node = NULL;
4098 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4099 lsei_next_function_in_partition (&lsei))
4100 {
4101 node = lsei_cgraph_node (lsei);
4102 if (cgraph_function_with_gimple_body_p (node)
4103 && ipa_get_agg_replacements_for_node (node) != NULL)
4104 count++;
4105 }
4106
4107 streamer_write_uhwi (ob, count);
4108
4109 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4110 lsei_next_function_in_partition (&lsei))
4111 {
4112 node = lsei_cgraph_node (lsei);
4113 if (cgraph_function_with_gimple_body_p (node)
4114 && ipa_get_agg_replacements_for_node (node) != NULL)
4115 write_agg_replacement_chain (ob, node);
4116 }
4117 streamer_write_char_stream (ob->main_stream, 0);
4118 produce_asm (ob, NULL);
4119 destroy_output_block (ob);
4120 }
4121
4122 /* Read replacements section in file FILE_DATA of length LEN with data
4123 DATA. */
4124
4125 static void
4126 read_replacements_section (struct lto_file_decl_data *file_data,
4127 const char *data,
4128 size_t len)
4129 {
4130 const struct lto_function_header *header =
4131 (const struct lto_function_header *) data;
4132 const int cfg_offset = sizeof (struct lto_function_header);
4133 const int main_offset = cfg_offset + header->cfg_size;
4134 const int string_offset = main_offset + header->main_size;
4135 struct data_in *data_in;
4136 struct lto_input_block ib_main;
4137 unsigned int i;
4138 unsigned int count;
4139
4140 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4141 header->main_size);
4142
4143 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4144 header->string_size, vNULL);
4145 count = streamer_read_uhwi (&ib_main);
4146
4147 for (i = 0; i < count; i++)
4148 {
4149 unsigned int index;
4150 struct cgraph_node *node;
4151 lto_symtab_encoder_t encoder;
4152
4153 index = streamer_read_uhwi (&ib_main);
4154 encoder = file_data->symtab_node_encoder;
4155 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4156 gcc_assert (node->symbol.definition);
4157 read_agg_replacement_chain (&ib_main, node, data_in);
4158 }
4159 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4160 len);
4161 lto_data_in_delete (data_in);
4162 }
4163
4164 /* Read IPA-CP aggregate replacements. */
4165
4166 void
4167 ipa_prop_read_all_agg_replacement (void)
4168 {
4169 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4170 struct lto_file_decl_data *file_data;
4171 unsigned int j = 0;
4172
4173 while ((file_data = file_data_vec[j++]))
4174 {
4175 size_t len;
4176 const char *data = lto_get_section_data (file_data,
4177 LTO_section_ipcp_transform,
4178 NULL, &len);
4179 if (data)
4180 read_replacements_section (file_data, data, len);
4181 }
4182 }
4183
4184 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4185 NODE. */
4186
4187 static void
4188 adjust_agg_replacement_values (struct cgraph_node *node,
4189 struct ipa_agg_replacement_value *aggval)
4190 {
4191 struct ipa_agg_replacement_value *v;
4192 int i, c = 0, d = 0, *adj;
4193
4194 if (!node->clone.combined_args_to_skip)
4195 return;
4196
4197 for (v = aggval; v; v = v->next)
4198 {
4199 gcc_assert (v->index >= 0);
4200 if (c < v->index)
4201 c = v->index;
4202 }
4203 c++;
4204
4205 adj = XALLOCAVEC (int, c);
4206 for (i = 0; i < c; i++)
4207 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4208 {
4209 adj[i] = -1;
4210 d++;
4211 }
4212 else
4213 adj[i] = i - d;
4214
4215 for (v = aggval; v; v = v->next)
4216 v->index = adj[v->index];
4217 }
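
/* A made-up example of the remapping: if combined_args_to_skip contains
   only bit 1 and indices up to 3 occur in AGGVAL, the loop above builds
   adj = {0, -1, 1, 2}, so a value recorded for original parameter 2 is
   re-attached to parameter 1 of the clone.  */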
4218
4219
4220 /* IPA-CP transformation phase: substitute recorded aggregate constant values into loads in the body of NODE. */
4221
4222 unsigned int
4223 ipcp_transform_function (struct cgraph_node *node)
4224 {
4225 vec<ipa_param_descriptor_t> descriptors = vNULL;
4226 struct param_analysis_info *parms_ainfo;
4227 struct ipa_agg_replacement_value *aggval;
4228 gimple_stmt_iterator gsi;
4229 basic_block bb;
4230 int param_count;
4231 bool cfg_changed = false, something_changed = false;
4232
4233 gcc_checking_assert (cfun);
4234 gcc_checking_assert (current_function_decl);
4235
4236 if (dump_file)
4237 fprintf (dump_file, "Modification phase of node %s/%i\n",
4238 cgraph_node_name (node), node->symbol.order);
4239
4240 aggval = ipa_get_agg_replacements_for_node (node);
4241 if (!aggval)
4242 return 0;
4243 param_count = count_formal_params (node->symbol.decl);
4244 if (param_count == 0)
4245 return 0;
4246 adjust_agg_replacement_values (node, aggval);
4247 if (dump_file)
4248 ipa_dump_agg_replacement_values (dump_file, aggval);
4249 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4250 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4251 descriptors.safe_grow_cleared (param_count);
4252 ipa_populate_param_decls (node, descriptors);
4253
4254 FOR_EACH_BB (bb)
4255 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4256 {
4257 struct ipa_agg_replacement_value *v;
4258 gimple stmt = gsi_stmt (gsi);
4259 tree rhs, val, t;
4260 HOST_WIDE_INT offset;
4261 int index;
4262 bool by_ref, vce;
4263
4264 if (!gimple_assign_load_p (stmt))
4265 continue;
4266 rhs = gimple_assign_rhs1 (stmt);
4267 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4268 continue;
4269
4270 vce = false;
4271 t = rhs;
4272 while (handled_component_p (t))
4273 {
4274 /* V_C_E can do things like convert an array of integers to one
4275 bigger integer and similar things we do not handle below. */
4276 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4277 {
4278 vce = true;
4279 break;
4280 }
4281 t = TREE_OPERAND (t, 0);
4282 }
4283 if (vce)
4284 continue;
4285
4286 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4287 rhs, &index, &offset, &by_ref))
4288 continue;
4289 for (v = aggval; v; v = v->next)
4290 if (v->index == index
4291 && v->offset == offset)
4292 break;
4293 if (!v || v->by_ref != by_ref)
4294 continue;
4295
4296 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4297 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4298 {
4299 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4300 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4301 else if (TYPE_SIZE (TREE_TYPE (rhs))
4302 == TYPE_SIZE (TREE_TYPE (v->value)))
4303 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4304 else
4305 {
4306 if (dump_file)
4307 {
4308 fprintf (dump_file, " const ");
4309 print_generic_expr (dump_file, v->value, 0);
4310 fprintf (dump_file, " can't be converted to type of ");
4311 print_generic_expr (dump_file, rhs, 0);
4312 fprintf (dump_file, "\n");
4313 }
4314 continue;
4315 }
4316 }
4317 else
4318 val = v->value;
4319
4320 if (dump_file && (dump_flags & TDF_DETAILS))
4321 {
4322 fprintf (dump_file, "Modifying stmt:\n ");
4323 print_gimple_stmt (dump_file, stmt, 0, 0);
4324 }
4325 gimple_assign_set_rhs_from_tree (&gsi, val);
4326 update_stmt (stmt);
4327
4328 if (dump_file && (dump_flags & TDF_DETAILS))
4329 {
4330 fprintf (dump_file, "into:\n ");
4331 print_gimple_stmt (dump_file, stmt, 0, 0);
4332 fprintf (dump_file, "\n");
4333 }
4334
4335 something_changed = true;
4336 if (maybe_clean_eh_stmt (stmt)
4337 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4338 cfg_changed = true;
4339 }
4340
4341 (*ipa_node_agg_replacements)[node->uid] = NULL;
4342 free_parms_ainfo (parms_ainfo, param_count);
4343 descriptors.release ();
4344
4345 if (!something_changed)
4346 return 0;
4347 else if (cfg_changed)
4348 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4349 else
4350 return TODO_update_ssa_only_virtuals;
4351 }
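
/* A concrete, made-up example of the transformation above: given an
   aggregate replacement 0[32]=7 for a by-value parameter s whose field b
   lives at bit offset 32, the load

     i_1 = s.b;

   is rewritten into

     i_1 = 7;

   after which the statement is updated and any EH edges that the
   replacement has made dead are purged.  */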