ipa-prop.c (update_jump_functions_after_inlining): When type is not preserved by...
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60
61 /* Intermediate information about a parameter that is only useful during the
62 run of ipa_analyze_node and is not kept afterwards. */
63
64 struct param_analysis_info
65 {
66 bool parm_modified, ref_modified, pt_modified;
67 bitmap parm_visited_statements, pt_visited_statements;
68 };
69
70 /* Vector where the parameter infos are actually stored. */
71 vec<ipa_node_params> ipa_node_params_vector;
72 /* Vector of known aggregate values in cloned nodes. */
73 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
 74 /* Vector where the edge argument infos are actually stored. */
75 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
76
77 /* Holders of ipa cgraph hooks: */
78 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
79 static struct cgraph_node_hook_list *node_removal_hook_holder;
80 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
81 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
82 static struct cgraph_node_hook_list *function_insertion_hook_holder;
83
84 /* Description of a reference to an IPA constant. */
85 struct ipa_cst_ref_desc
86 {
87 /* Edge that corresponds to the statement which took the reference. */
88 struct cgraph_edge *cs;
89 /* Linked list of duplicates created when call graph edges are cloned. */
90 struct ipa_cst_ref_desc *next_duplicate;
91 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
 92 is out of control. */
93 int refcount;
94 };
95
96 /* Allocation pool for reference descriptions. */
97
98 static alloc_pool ipa_refdesc_pool;
99
100 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
101 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
102
103 static bool
104 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
105 {
106 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
107 struct cl_optimization *os;
108
109 if (!fs_opts)
110 return false;
111 os = TREE_OPTIMIZATION (fs_opts);
112 return !os->x_optimize || !os->x_flag_ipa_cp;
113 }
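
/* For illustration (an assumed example, not from the original sources): a
   function declared as

     __attribute__ ((optimize ("O0"))) int f (int i) { return i; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with x_optimize equal to
   zero, so the predicate above returns true and IPA-CP skips it.  */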
114
 115 /* Return index of the formal parameter whose tree is PTREE, looked up in
 116 the vector of parameter DESCRIPTORS. */
117
118 static int
119 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
120 {
121 int i, count;
122
123 count = descriptors.length ();
124 for (i = 0; i < count; i++)
125 if (descriptors[i].decl == ptree)
126 return i;
127
128 return -1;
129 }
130
131 /* Return index of the formal whose tree is PTREE in function which corresponds
132 to INFO. */
133
134 int
135 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
136 {
137 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
138 }
139
 140 /* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
 141 the formal parameters of NODE. */
142
143 static void
144 ipa_populate_param_decls (struct cgraph_node *node,
145 vec<ipa_param_descriptor> &descriptors)
146 {
147 tree fndecl;
148 tree fnargs;
149 tree parm;
150 int param_num;
151
152 fndecl = node->decl;
153 gcc_assert (gimple_has_body_p (fndecl));
154 fnargs = DECL_ARGUMENTS (fndecl);
155 param_num = 0;
156 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
157 {
158 descriptors[param_num].decl = parm;
159 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
160 param_num++;
161 }
162 }
163
164 /* Return how many formal parameters FNDECL has. */
165
166 static inline int
167 count_formal_params (tree fndecl)
168 {
169 tree parm;
170 int count = 0;
171 gcc_assert (gimple_has_body_p (fndecl));
172
173 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
174 count++;
175
176 return count;
177 }
178
 179 /* Dump information about the Ith formal parameter of the function
 180 corresponding to INFO to FILE: its number and, when available, its
 181 declaration. */
182
183 void
184 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
185 {
186 fprintf (file, "param #%i", i);
187 if (info->descriptors[i].decl)
188 {
189 fprintf (file, " ");
190 print_generic_expr (file, info->descriptors[i].decl, 0);
191 }
192 }
193
194 /* Initialize the ipa_node_params structure associated with NODE
195 to hold PARAM_COUNT parameters. */
196
197 void
198 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
199 {
200 struct ipa_node_params *info = IPA_NODE_REF (node);
201
202 if (!info->descriptors.exists () && param_count)
203 info->descriptors.safe_grow_cleared (param_count);
204 }
205
206 /* Initialize the ipa_node_params structure associated with NODE by counting
207 the function parameters, creating the descriptors and populating their
208 param_decls. */
209
210 void
211 ipa_initialize_node_params (struct cgraph_node *node)
212 {
213 struct ipa_node_params *info = IPA_NODE_REF (node);
214
215 if (!info->descriptors.exists ())
216 {
217 ipa_alloc_node_params (node, count_formal_params (node->decl));
218 ipa_populate_param_decls (node, info->descriptors);
219 }
220 }
221
222 /* Print the jump functions associated with call graph edge CS to file F. */
223
224 static void
225 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
226 {
227 int i, count;
228
229 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
230 for (i = 0; i < count; i++)
231 {
232 struct ipa_jump_func *jump_func;
233 enum jump_func_type type;
234
235 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
236 type = jump_func->type;
237
238 fprintf (f, " param %d: ", i);
239 if (type == IPA_JF_UNKNOWN)
240 fprintf (f, "UNKNOWN\n");
241 else if (type == IPA_JF_KNOWN_TYPE)
242 {
243 fprintf (f, "KNOWN TYPE: base ");
244 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
 245 fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
246 jump_func->value.known_type.offset);
247 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
248 fprintf (f, "\n");
249 }
250 else if (type == IPA_JF_CONST)
251 {
252 tree val = jump_func->value.constant.value;
253 fprintf (f, "CONST: ");
254 print_generic_expr (f, val, 0);
255 if (TREE_CODE (val) == ADDR_EXPR
256 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
257 {
258 fprintf (f, " -> ");
259 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
260 0);
261 }
262 fprintf (f, "\n");
263 }
264 else if (type == IPA_JF_PASS_THROUGH)
265 {
266 fprintf (f, "PASS THROUGH: ");
267 fprintf (f, "%d, op %s",
268 jump_func->value.pass_through.formal_id,
 269 get_tree_code_name (jump_func->value.pass_through.operation));
270 if (jump_func->value.pass_through.operation != NOP_EXPR)
271 {
272 fprintf (f, " ");
273 print_generic_expr (f,
274 jump_func->value.pass_through.operand, 0);
275 }
276 if (jump_func->value.pass_through.agg_preserved)
277 fprintf (f, ", agg_preserved");
278 if (jump_func->value.pass_through.type_preserved)
279 fprintf (f, ", type_preserved");
280 fprintf (f, "\n");
281 }
282 else if (type == IPA_JF_ANCESTOR)
283 {
284 fprintf (f, "ANCESTOR: ");
 285 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
286 jump_func->value.ancestor.formal_id,
287 jump_func->value.ancestor.offset);
288 print_generic_expr (f, jump_func->value.ancestor.type, 0);
289 if (jump_func->value.ancestor.agg_preserved)
290 fprintf (f, ", agg_preserved");
291 if (jump_func->value.ancestor.type_preserved)
292 fprintf (f, ", type_preserved");
293 fprintf (f, "\n");
294 }
295
296 if (jump_func->agg.items)
297 {
298 struct ipa_agg_jf_item *item;
299 int j;
300
301 fprintf (f, " Aggregate passed by %s:\n",
302 jump_func->agg.by_ref ? "reference" : "value");
303 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
304 {
305 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
306 item->offset);
307 if (TYPE_P (item->value))
308 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
309 tree_to_uhwi (TYPE_SIZE (item->value)));
310 else
311 {
312 fprintf (f, "cst: ");
313 print_generic_expr (f, item->value, 0);
314 }
315 fprintf (f, "\n");
316 }
317 }
318 }
319 }
320
321
322 /* Print the jump functions of all arguments on all call graph edges going from
323 NODE to file F. */
324
325 void
326 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
327 {
328 struct cgraph_edge *cs;
329
330 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
331 node->order);
332 for (cs = node->callees; cs; cs = cs->next_callee)
333 {
334 if (!ipa_edge_args_info_available_for_edge_p (cs))
335 continue;
336
337 fprintf (f, " callsite %s/%i -> %s/%i : \n",
338 xstrdup (node->name ()), node->order,
339 xstrdup (cs->callee->name ()),
340 cs->callee->order);
341 ipa_print_node_jump_functions_for_edge (f, cs);
342 }
343
344 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
345 {
346 struct cgraph_indirect_call_info *ii;
347 if (!ipa_edge_args_info_available_for_edge_p (cs))
348 continue;
349
350 ii = cs->indirect_info;
351 if (ii->agg_contents)
352 fprintf (f, " indirect %s callsite, calling param %i, "
353 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
354 ii->member_ptr ? "member ptr" : "aggregate",
355 ii->param_index, ii->offset,
 356 ii->by_ref ? "by reference" : "by value");
357 else
358 fprintf (f, " indirect %s callsite, calling param %i",
359 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
360
361 if (cs->call_stmt)
362 {
363 fprintf (f, ", for stmt ");
364 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
365 }
366 else
367 fprintf (f, "\n");
368 ipa_print_node_jump_functions_for_edge (f, cs);
369 }
370 }
371
372 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
373
374 void
375 ipa_print_all_jump_functions (FILE *f)
376 {
377 struct cgraph_node *node;
378
379 fprintf (f, "\nJump functions:\n");
380 FOR_EACH_FUNCTION (node)
381 {
382 ipa_print_node_jump_functions (f, node);
383 }
384 }
385
386 /* Set JFUNC to be a known type jump function. */
387
388 static void
389 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
390 tree base_type, tree component_type)
391 {
392 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
393 && TYPE_BINFO (component_type));
394 jfunc->type = IPA_JF_KNOWN_TYPE;
 395 jfunc->value.known_type.offset = offset;
396 jfunc->value.known_type.base_type = base_type;
397 jfunc->value.known_type.component_type = component_type;
398 gcc_assert (component_type);
399 }
400
 401 /* Set JFUNC to be a copy of another jump function (to be used by the jump
 402 function combination code). The two functions will share their rdesc. */
403
404 static void
405 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
406 struct ipa_jump_func *src)
407
408 {
409 gcc_checking_assert (src->type == IPA_JF_CONST);
410 dst->type = IPA_JF_CONST;
411 dst->value.constant = src->value.constant;
412 }
413
 414 /* Set JFUNC to be a constant jump function. */
415
416 static void
417 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
418 struct cgraph_edge *cs)
419 {
420 constant = unshare_expr (constant);
421 if (constant && EXPR_P (constant))
422 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
423 jfunc->type = IPA_JF_CONST;
424 jfunc->value.constant.value = unshare_expr_without_location (constant);
425
426 if (TREE_CODE (constant) == ADDR_EXPR
427 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
428 {
429 struct ipa_cst_ref_desc *rdesc;
430 if (!ipa_refdesc_pool)
431 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
432 sizeof (struct ipa_cst_ref_desc), 32);
433
434 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
435 rdesc->cs = cs;
436 rdesc->next_duplicate = NULL;
437 rdesc->refcount = 1;
438 jfunc->value.constant.rdesc = rdesc;
439 }
440 else
441 jfunc->value.constant.rdesc = NULL;
442 }
443
444 /* Set JFUNC to be a simple pass-through jump function. */
445 static void
446 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
447 bool agg_preserved, bool type_preserved)
448 {
449 jfunc->type = IPA_JF_PASS_THROUGH;
450 jfunc->value.pass_through.operand = NULL_TREE;
451 jfunc->value.pass_through.formal_id = formal_id;
452 jfunc->value.pass_through.operation = NOP_EXPR;
453 jfunc->value.pass_through.agg_preserved = agg_preserved;
454 jfunc->value.pass_through.type_preserved = type_preserved;
455 }
456
457 /* Set JFUNC to be an arithmetic pass through jump function. */
458
459 static void
460 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
461 tree operand, enum tree_code operation)
462 {
463 jfunc->type = IPA_JF_PASS_THROUGH;
464 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
465 jfunc->value.pass_through.formal_id = formal_id;
466 jfunc->value.pass_through.operation = operation;
467 jfunc->value.pass_through.agg_preserved = false;
468 jfunc->value.pass_through.type_preserved = false;
469 }
470
471 /* Set JFUNC to be an ancestor jump function. */
472
473 static void
474 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
475 tree type, int formal_id, bool agg_preserved,
476 bool type_preserved)
477 {
478 jfunc->type = IPA_JF_ANCESTOR;
479 jfunc->value.ancestor.formal_id = formal_id;
480 jfunc->value.ancestor.offset = offset;
481 jfunc->value.ancestor.type = type;
482 jfunc->value.ancestor.agg_preserved = agg_preserved;
483 jfunc->value.ancestor.type_preserved = type_preserved;
484 }
485
 486 /* Extract the actual BINFO being described by JFUNC which must be a known type
487 jump function. */
488
489 tree
490 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
491 {
492 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
493 if (!base_binfo)
494 return NULL_TREE;
495 return get_binfo_at_offset (base_binfo,
496 jfunc->value.known_type.offset,
497 jfunc->value.known_type.component_type);
498 }
499
500 /* Structure to be passed in between detect_type_change and
501 check_stmt_for_type_change. */
502
503 struct type_change_info
504 {
505 /* Offset into the object where there is the virtual method pointer we are
506 looking for. */
507 HOST_WIDE_INT offset;
508 /* The declaration or SSA_NAME pointer of the base that we are checking for
509 type change. */
510 tree object;
511 /* If we actually can tell the type that the object has changed to, it is
512 stored in this field. Otherwise it remains NULL_TREE. */
513 tree known_current_type;
514 /* Set to true if dynamic type change has been detected. */
515 bool type_maybe_changed;
516 /* Set to true if multiple types have been encountered. known_current_type
517 must be disregarded in that case. */
518 bool multiple_types_encountered;
519 };
520
521 /* Return true if STMT can modify a virtual method table pointer.
522
523 This function makes special assumptions about both constructors and
524 destructors which are all the functions that are allowed to alter the VMT
525 pointers. It assumes that destructors begin with assignment into all VMT
 526 pointers and that constructors essentially proceed in the following way:
527
 528 1) The very first thing they do is call constructors of ancestor
529 sub-objects that have them.
530
 531 2) Then VMT pointers of this type and all its ancestors are set to new
 532 values corresponding to the type of the constructor.
533
 534 3) Only afterwards, other stuff such as constructors of member sub-objects
535 and the code written by the user is run. Only this may include calling
536 virtual functions, directly or indirectly.
537
538 There is no way to call a constructor of an ancestor sub-object in any
539 other way.
540
541 This means that we do not have to care whether constructors get the correct
542 type information because they will always change it (in fact, if we define
543 the type to be given by the VMT pointer, it is undefined).
544
545 The most important fact to derive from the above is that if, for some
 546 statement in section 3, we try to detect whether the dynamic type has
547 changed, we can safely ignore all calls as we examine the function body
548 backwards until we reach statements in section 2 because these calls cannot
549 be ancestor constructors or destructors (if the input is not bogus) and so
550 do not change the dynamic type (this holds true only for automatically
551 allocated objects but at the moment we devirtualize only these). We then
552 must detect that statements in section 2 change the dynamic type and can try
553 to derive the new type. That is enough and we can stop, we will never see
554 the calls into constructors of sub-objects in this code. Therefore we can
555 safely ignore all call statements that we traverse.
556 */
557
558 static bool
559 stmt_may_be_vtbl_ptr_store (gimple stmt)
560 {
561 if (is_gimple_call (stmt))
562 return false;
563 else if (gimple_clobber_p (stmt))
564 return false;
565 else if (is_gimple_assign (stmt))
566 {
567 tree lhs = gimple_assign_lhs (stmt);
568
569 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
570 {
571 if (flag_strict_aliasing
572 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
573 return false;
574
575 if (TREE_CODE (lhs) == COMPONENT_REF
576 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
577 return false;
578 /* In the future we might want to use get_base_ref_and_offset to find
579 if there is a field corresponding to the offset and if so, proceed
580 almost like if it was a component ref. */
581 }
582 }
583 return true;
584 }
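
/* For illustration, a typical VMT pointer store emitted in a constructor
   looks in GIMPLE roughly like this (a sketch, identifiers made up):

     this_2(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   The LHS is a COMPONENT_REF of a field with DECL_VIRTUAL_P set, so the
   predicate above returns true for such a statement.  */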
585
586 /* If STMT can be proved to be an assignment to the virtual method table
 587 pointer of the object described by TCI and the type associated with the
 588 new table identified, return the type. Otherwise return NULL_TREE. */
589
590 static tree
591 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
592 {
593 HOST_WIDE_INT offset, size, max_size;
594 tree lhs, rhs, base;
595
596 if (!gimple_assign_single_p (stmt))
597 return NULL_TREE;
598
599 lhs = gimple_assign_lhs (stmt);
600 rhs = gimple_assign_rhs1 (stmt);
601 if (TREE_CODE (lhs) != COMPONENT_REF
602 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
603 || TREE_CODE (rhs) != ADDR_EXPR)
604 return NULL_TREE;
605 rhs = get_base_address (TREE_OPERAND (rhs, 0));
606 if (!rhs
607 || TREE_CODE (rhs) != VAR_DECL
608 || !DECL_VIRTUAL_P (rhs))
609 return NULL_TREE;
610
611 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
612 if (offset != tci->offset
613 || size != POINTER_SIZE
614 || max_size != POINTER_SIZE)
615 return NULL_TREE;
616 if (TREE_CODE (base) == MEM_REF)
617 {
618 if (TREE_CODE (tci->object) != MEM_REF
619 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
620 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
621 TREE_OPERAND (base, 1)))
622 return NULL_TREE;
623 }
624 else if (tci->object != base)
625 return NULL_TREE;
626
627 return DECL_CONTEXT (rhs);
628 }
629
630 /* Callback of walk_aliased_vdefs and a helper function for
631 detect_type_change to check whether a particular statement may modify
632 the virtual table pointer, and if possible also determine the new type of
633 the (sub-)object. It stores its result into DATA, which points to a
634 type_change_info structure. */
635
636 static bool
637 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
638 {
639 gimple stmt = SSA_NAME_DEF_STMT (vdef);
640 struct type_change_info *tci = (struct type_change_info *) data;
641
642 if (stmt_may_be_vtbl_ptr_store (stmt))
643 {
644 tree type;
645 type = extr_type_from_vtbl_ptr_store (stmt, tci);
646 if (tci->type_maybe_changed
647 && type != tci->known_current_type)
648 tci->multiple_types_encountered = true;
649 tci->known_current_type = type;
650 tci->type_maybe_changed = true;
651 return true;
652 }
653 else
654 return false;
655 }
656
657
658
659 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
660 callsite CALL) by looking for assignments to its virtual table pointer. If
 661 it has, return true and fill in the jump function JFUNC with relevant type
662 information or set it to unknown. ARG is the object itself (not a pointer
663 to it, unless dereferenced). BASE is the base of the memory access as
664 returned by get_ref_base_and_extent, as is the offset. */
665
666 static bool
667 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
668 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
669 {
670 struct type_change_info tci;
671 ao_ref ao;
672
673 gcc_checking_assert (DECL_P (arg)
674 || TREE_CODE (arg) == MEM_REF
675 || handled_component_p (arg));
676 /* Const calls cannot call virtual methods through VMT and so type changes do
677 not matter. */
678 if (!flag_devirtualize || !gimple_vuse (call)
679 /* Be sure expected_type is polymorphic. */
680 || !comp_type
681 || TREE_CODE (comp_type) != RECORD_TYPE
682 || !TYPE_BINFO (comp_type)
683 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
684 return false;
685
686 ao_ref_init (&ao, arg);
687 ao.base = base;
688 ao.offset = offset;
689 ao.size = POINTER_SIZE;
690 ao.max_size = ao.size;
691
692 tci.offset = offset;
693 tci.object = get_base_address (arg);
694 tci.known_current_type = NULL_TREE;
695 tci.type_maybe_changed = false;
696 tci.multiple_types_encountered = false;
697
698 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
699 &tci, NULL);
700 if (!tci.type_maybe_changed)
701 return false;
702
703 if (!tci.known_current_type
704 || tci.multiple_types_encountered
705 || offset != 0)
706 jfunc->type = IPA_JF_UNKNOWN;
707 else
708 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
709
710 return true;
711 }
712
713 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
714 SSA name (its dereference will become the base and the offset is assumed to
715 be zero). */
716
717 static bool
718 detect_type_change_ssa (tree arg, tree comp_type,
719 gimple call, struct ipa_jump_func *jfunc)
720 {
721 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
722 if (!flag_devirtualize
723 || !POINTER_TYPE_P (TREE_TYPE (arg)))
724 return false;
725
726 arg = build2 (MEM_REF, ptr_type_node, arg,
727 build_int_cst (ptr_type_node, 0));
728
729 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
730 }
731
732 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
733 boolean variable pointed to by DATA. */
734
735 static bool
736 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
737 void *data)
738 {
739 bool *b = (bool *) data;
740 *b = true;
741 return true;
742 }
743
744 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
745 a value known not to be modified in this function before reaching the
746 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
747 information about the parameter. */
748
749 static bool
750 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
751 gimple stmt, tree parm_load)
752 {
753 bool modified = false;
754 bitmap *visited_stmts;
755 ao_ref refd;
756
757 if (parm_ainfo && parm_ainfo->parm_modified)
758 return false;
759
760 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
761 ao_ref_init (&refd, parm_load);
762 /* We can cache visited statements only when parm_ainfo is available and when
763 we are looking at a naked load of the whole parameter. */
764 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
765 visited_stmts = NULL;
766 else
767 visited_stmts = &parm_ainfo->parm_visited_statements;
768 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
769 visited_stmts);
770 if (parm_ainfo && modified)
771 parm_ainfo->parm_modified = true;
772 return !modified;
773 }
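
/* E.g. in this illustrative sketch:

     foo (struct S s)
     {
       use (s.f);   // the load in question
       s.f = 0;     // a store, but only after the load
     }

   no virtual definition reaches the load, so walk_aliased_vdefs marks
   nothing as modified and the function above returns true.  */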
774
 775 /* If STMT is an assignment that loads a value from a parameter declaration
 776 and the parameter has not been modified, return its index in
 777 ipa_node_params. Otherwise return -1. */
778
779 static int
780 load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
781 struct param_analysis_info *parms_ainfo,
782 gimple stmt)
783 {
784 int index;
785 tree op1;
786
787 if (!gimple_assign_single_p (stmt))
788 return -1;
789
790 op1 = gimple_assign_rhs1 (stmt);
791 if (TREE_CODE (op1) != PARM_DECL)
792 return -1;
793
794 index = ipa_get_param_decl_index_1 (descriptors, op1);
795 if (index < 0
796 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
797 : NULL, stmt, op1))
798 return -1;
799
800 return index;
801 }
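
/* This typically matches loads of addressable parameters, e.g. in the
   illustrative fragment

     foo (int a)    // a is addressable and thus not a gimple register
     {
       a.0_2 = a;   // here the index of a is returned
       ...

   provided no aliased store modifies a before the load.  */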
802
803 /* Return true if memory reference REF loads data that are known to be
804 unmodified in this function before reaching statement STMT. PARM_AINFO, if
805 non-NULL, is a pointer to a structure containing temporary information about
806 PARM. */
807
808 static bool
809 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
810 gimple stmt, tree ref)
811 {
812 bool modified = false;
813 ao_ref refd;
814
815 gcc_checking_assert (gimple_vuse (stmt));
816 if (parm_ainfo && parm_ainfo->ref_modified)
817 return false;
818
819 ao_ref_init (&refd, ref);
820 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
821 NULL);
822 if (parm_ainfo && modified)
823 parm_ainfo->ref_modified = true;
824 return !modified;
825 }
826
827 /* Return true if the data pointed to by PARM is known to be unmodified in this
828 function before reaching call statement CALL into which it is passed.
829 PARM_AINFO is a pointer to a structure containing temporary information
830 about PARM. */
831
832 static bool
833 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
834 gimple call, tree parm)
835 {
836 bool modified = false;
837 ao_ref refd;
838
 839 /* It's unnecessary to calculate anything about memory contents for a const
 840 function because it is not going to use it. But do not cache the result
841 either. Also, no such calculations for non-pointers. */
842 if (!gimple_vuse (call)
843 || !POINTER_TYPE_P (TREE_TYPE (parm)))
844 return false;
845
846 if (parm_ainfo->pt_modified)
847 return false;
848
849 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
850 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
851 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
852 if (modified)
853 parm_ainfo->pt_modified = true;
854 return !modified;
855 }
856
857 /* Return true if we can prove that OP is a memory reference loading unmodified
858 data from an aggregate passed as a parameter and if the aggregate is passed
859 by reference, that the alias type of the load corresponds to the type of the
860 formal parameter (so that we can rely on this type for TBAA in callers).
861 INFO and PARMS_AINFO describe parameters of the current function (but the
862 latter can be NULL), STMT is the load statement. If function returns true,
863 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
864 within the aggregate and whether it is a load from a value passed by
865 reference respectively. */
866
867 static bool
868 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
869 struct param_analysis_info *parms_ainfo, gimple stmt,
870 tree op, int *index_p, HOST_WIDE_INT *offset_p,
871 HOST_WIDE_INT *size_p, bool *by_ref_p)
872 {
873 int index;
874 HOST_WIDE_INT size, max_size;
875 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
876
877 if (max_size == -1 || max_size != size || *offset_p < 0)
878 return false;
879
880 if (DECL_P (base))
881 {
882 int index = ipa_get_param_decl_index_1 (descriptors, base);
883 if (index >= 0
884 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
885 : NULL, stmt, op))
886 {
887 *index_p = index;
888 *by_ref_p = false;
889 if (size_p)
890 *size_p = size;
891 return true;
892 }
893 return false;
894 }
895
896 if (TREE_CODE (base) != MEM_REF
897 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
898 || !integer_zerop (TREE_OPERAND (base, 1)))
899 return false;
900
901 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
902 {
903 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
904 index = ipa_get_param_decl_index_1 (descriptors, parm);
905 }
906 else
907 {
908 /* This branch catches situations where a pointer parameter is not a
909 gimple register, for example:
910
911 void hip7(S*) (struct S * p)
912 {
913 void (*<T2e4>) (struct S *) D.1867;
914 struct S * p.1;
915
916 <bb 2>:
917 p.1_1 = p;
918 D.1867_2 = p.1_1->f;
919 D.1867_2 ();
920 gdp = &p;
921 */
922
923 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
924 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
925 }
926
927 if (index >= 0
928 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
929 stmt, op))
930 {
931 *index_p = index;
932 *by_ref_p = true;
933 if (size_p)
934 *size_p = size;
935 return true;
936 }
937 return false;
938 }
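
/* Illustrative summary of the two accepted shapes: for an aggregate
   parameter s passed by value, a load such as

     x_1 = s.fld;          // *BY_REF_P becomes false

   and for a pointer parameter p (a default definition SSA name), a load

     x_1 = p_2(D)->fld;    // *BY_REF_P becomes true

   are both recognized; *INDEX_P and *OFFSET_P then hold the parameter
   index and the bit offset of the load within the aggregate.  */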
939
 940 /* Just like the previous function, but without the param_analysis_info
941 pointer, for users outside of this file. */
942
943 bool
944 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
945 tree op, int *index_p, HOST_WIDE_INT *offset_p,
946 bool *by_ref_p)
947 {
948 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
949 offset_p, NULL, by_ref_p);
950 }
951
952 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
953 of an assignment statement STMT, try to determine whether we are actually
954 handling any of the following cases and construct an appropriate jump
955 function into JFUNC if so:
956
957 1) The passed value is loaded from a formal parameter which is not a gimple
958 register (most probably because it is addressable, the value has to be
959 scalar) and we can guarantee the value has not changed. This case can
960 therefore be described by a simple pass-through jump function. For example:
961
962 foo (int a)
963 {
964 int a.0;
965
966 a.0_2 = a;
967 bar (a.0_2);
968
969 2) The passed value can be described by a simple arithmetic pass-through
970 jump function. E.g.
971
972 foo (int a)
973 {
974 int D.2064;
975
976 D.2064_4 = a.1(D) + 4;
977 bar (D.2064_4);
978
979 This case can also occur in combination of the previous one, e.g.:
980
981 foo (int a, int z)
982 {
983 int a.0;
984 int D.2064;
985
986 a.0_3 = a;
987 D.2064_4 = a.0_3 + 4;
988 foo (D.2064_4);
989
990 3) The passed value is an address of an object within another one (which
 991 is also passed by reference). Such situations are described by an ancestor
992 jump function and describe situations such as:
993
994 B::foo() (struct B * const this)
995 {
996 struct A * D.1845;
997
998 D.1845_2 = &this_1(D)->D.1748;
999 A::bar (D.1845_2);
1000
 1001 INFO is the structure describing individual parameters as accessed at
 1002 different stages of IPA optimizations. PARMS_AINFO contains the
 1003 information that is only needed for intraprocedural analysis. */
1004
1005 static void
1006 compute_complex_assign_jump_func (struct ipa_node_params *info,
1007 struct param_analysis_info *parms_ainfo,
1008 struct ipa_jump_func *jfunc,
1009 gimple call, gimple stmt, tree name,
1010 tree param_type)
1011 {
1012 HOST_WIDE_INT offset, size, max_size;
1013 tree op1, tc_ssa, base, ssa;
1014 int index;
1015
1016 op1 = gimple_assign_rhs1 (stmt);
1017
1018 if (TREE_CODE (op1) == SSA_NAME)
1019 {
1020 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1021 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1022 else
1023 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
1024 SSA_NAME_DEF_STMT (op1));
1025 tc_ssa = op1;
1026 }
1027 else
1028 {
1029 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
1030 tc_ssa = gimple_assign_lhs (stmt);
1031 }
1032
1033 if (index >= 0)
1034 {
1035 tree op2 = gimple_assign_rhs2 (stmt);
1036
1037 if (op2)
1038 {
1039 if (!is_gimple_ip_invariant (op2)
1040 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1041 && !useless_type_conversion_p (TREE_TYPE (name),
1042 TREE_TYPE (op1))))
1043 return;
1044
1045 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1046 gimple_assign_rhs_code (stmt));
1047 }
1048 else if (gimple_assign_single_p (stmt))
1049 {
1050 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1051 call, tc_ssa);
1052 bool type_p = false;
1053
1054 if (param_type && POINTER_TYPE_P (param_type))
1055 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1056 call, jfunc);
1057 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1058 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1059 }
1060 return;
1061 }
1062
1063 if (TREE_CODE (op1) != ADDR_EXPR)
1064 return;
1065 op1 = TREE_OPERAND (op1, 0);
1066 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1067 return;
1068 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1069 if (TREE_CODE (base) != MEM_REF
1070 /* If this is a varying address, punt. */
1071 || max_size == -1
1072 || max_size != size)
1073 return;
1074 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
1075 ssa = TREE_OPERAND (base, 0);
1076 if (TREE_CODE (ssa) != SSA_NAME
1077 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1078 || offset < 0)
1079 return;
1080
1081 /* Dynamic types are changed in constructors and destructors. */
1082 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1083 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1084 {
1085 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1086 call, jfunc, offset);
1087 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1088 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1089 parm_ref_data_pass_through_p (&parms_ainfo[index],
1090 call, ssa), type_p);
1091 }
1092 }
1093
1094 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1095 it looks like:
1096
1097 iftmp.1_3 = &obj_2(D)->D.1762;
1098
1099 The base of the MEM_REF must be a default definition SSA NAME of a
 1100 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1101 whole MEM_REF expression is returned and the offset calculated from any
1102 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1103 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1104
1105 static tree
1106 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1107 {
1108 HOST_WIDE_INT size, max_size;
1109 tree expr, parm, obj;
1110
1111 if (!gimple_assign_single_p (assign))
1112 return NULL_TREE;
1113 expr = gimple_assign_rhs1 (assign);
1114
1115 if (TREE_CODE (expr) != ADDR_EXPR)
1116 return NULL_TREE;
1117 expr = TREE_OPERAND (expr, 0);
1118 obj = expr;
1119 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1120
1121 if (TREE_CODE (expr) != MEM_REF
1122 /* If this is a varying address, punt. */
1123 || max_size == -1
1124 || max_size != size
1125 || *offset < 0)
1126 return NULL_TREE;
1127 parm = TREE_OPERAND (expr, 0);
1128 if (TREE_CODE (parm) != SSA_NAME
1129 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1130 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1131 return NULL_TREE;
1132
1133 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1134 *obj_p = obj;
1135 return expr;
1136 }
1137
1138
1139 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1140 statement PHI, try to find out whether NAME is in fact a
1141 multiple-inheritance typecast from a descendant into an ancestor of a formal
1142 parameter and thus can be described by an ancestor jump function and if so,
1143 write the appropriate function into JFUNC.
1144
1145 Essentially we want to match the following pattern:
1146
1147 if (obj_2(D) != 0B)
1148 goto <bb 3>;
1149 else
1150 goto <bb 4>;
1151
1152 <bb 3>:
1153 iftmp.1_3 = &obj_2(D)->D.1762;
1154
1155 <bb 4>:
1156 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1157 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1158 return D.1879_6; */
1159
1160 static void
1161 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1162 struct param_analysis_info *parms_ainfo,
1163 struct ipa_jump_func *jfunc,
1164 gimple call, gimple phi, tree param_type)
1165 {
1166 HOST_WIDE_INT offset;
1167 gimple assign, cond;
1168 basic_block phi_bb, assign_bb, cond_bb;
1169 tree tmp, parm, expr, obj;
1170 int index, i;
1171
1172 if (gimple_phi_num_args (phi) != 2)
1173 return;
1174
1175 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1176 tmp = PHI_ARG_DEF (phi, 0);
1177 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1178 tmp = PHI_ARG_DEF (phi, 1);
1179 else
1180 return;
1181 if (TREE_CODE (tmp) != SSA_NAME
1182 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1183 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1184 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1185 return;
1186
1187 assign = SSA_NAME_DEF_STMT (tmp);
1188 assign_bb = gimple_bb (assign);
1189 if (!single_pred_p (assign_bb))
1190 return;
1191 expr = get_ancestor_addr_info (assign, &obj, &offset);
1192 if (!expr)
1193 return;
1194 parm = TREE_OPERAND (expr, 0);
1195 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1196 gcc_assert (index >= 0);
1197
1198 cond_bb = single_pred (assign_bb);
1199 cond = last_stmt (cond_bb);
1200 if (!cond
1201 || gimple_code (cond) != GIMPLE_COND
1202 || gimple_cond_code (cond) != NE_EXPR
1203 || gimple_cond_lhs (cond) != parm
1204 || !integer_zerop (gimple_cond_rhs (cond)))
1205 return;
1206
1207 phi_bb = gimple_bb (phi);
1208 for (i = 0; i < 2; i++)
1209 {
1210 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1211 if (pred != assign_bb && pred != cond_bb)
1212 return;
1213 }
1214
1215 bool type_p = false;
1216 if (param_type && POINTER_TYPE_P (param_type))
1217 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1218 call, jfunc, offset);
1219 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1220 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1221 parm_ref_data_pass_through_p (&parms_ainfo[index],
1222 call, parm), type_p);
1223 }
1224
1225 /* Given OP which is passed as an actual argument to a called function,
1226 determine if it is possible to construct a KNOWN_TYPE jump function for it
1227 and if so, create one and store it to JFUNC.
 1228 EXPECTED_TYPE represents a type the argument should be in. */
1229
1230 static void
1231 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1232 gimple call, tree expected_type)
1233 {
1234 HOST_WIDE_INT offset, size, max_size;
1235 tree base;
1236
1237 if (!flag_devirtualize
1238 || TREE_CODE (op) != ADDR_EXPR
1239 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1240 /* Be sure expected_type is polymorphic. */
1241 || !expected_type
1242 || TREE_CODE (expected_type) != RECORD_TYPE
1243 || !TYPE_BINFO (expected_type)
1244 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
1245 return;
1246
1247 op = TREE_OPERAND (op, 0);
1248 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1249 if (!DECL_P (base)
1250 || max_size == -1
1251 || max_size != size
1252 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1253 || is_global_var (base))
1254 return;
1255
1256 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1257 return;
1258
1259 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1260 expected_type);
1261 }
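
/* E.g. for a call such as (illustrative, with A a polymorphic class):

     A a;        // automatic variable of type A
     bar (&a);

   a KNOWN_TYPE jump function with base type A, offset zero and component
   type A is built, unless a store to the VMT pointer of a before the call
   makes the dynamic type uncertain.  */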
1262
1263 /* Inspect the given TYPE and return true iff it has the same structure (the
1264 same number of fields of the same types) as a C++ member pointer. If
1265 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1266 corresponding fields there. */
1267
1268 static bool
1269 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1270 {
1271 tree fld;
1272
1273 if (TREE_CODE (type) != RECORD_TYPE)
1274 return false;
1275
1276 fld = TYPE_FIELDS (type);
1277 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1278 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1279 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1280 return false;
1281
1282 if (method_ptr)
1283 *method_ptr = fld;
1284
1285 fld = DECL_CHAIN (fld);
 1286 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1287 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1288 return false;
1289 if (delta)
1290 *delta = fld;
1291
1292 if (DECL_CHAIN (fld))
1293 return false;
1294
1295 return true;
1296 }
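
/* For reference, in the Itanium C++ ABI a pointer to member function is
   laid out essentially as (a simplified sketch):

     struct
     {
       void (*__pfn) ();    // pointer to the method (a METHOD_TYPE in GCC)
       ptrdiff_t __delta;   // adjustment of the this pointer
     };

   which is exactly the two-field shape the predicate above tests for.  */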
1297
 1298 /* If RHS is an SSA_NAME and it is defined by a simple copy assignment,
1299 return the rhs of its defining statement. Otherwise return RHS as it
1300 is. */
1301
1302 static inline tree
1303 get_ssa_def_if_simple_copy (tree rhs)
1304 {
1305 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1306 {
1307 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1308
1309 if (gimple_assign_single_p (def_stmt))
1310 rhs = gimple_assign_rhs1 (def_stmt);
1311 else
1312 break;
1313 }
1314 return rhs;
1315 }
1316
 1317 /* Simple linked list, describing known contents of an aggregate before
 1318 the call. */
1319
1320 struct ipa_known_agg_contents_list
1321 {
1322 /* Offset and size of the described part of the aggregate. */
1323 HOST_WIDE_INT offset, size;
 1324 /* Known constant value or NULL if the contents are known to be unknown. */
1325 tree constant;
1326 /* Pointer to the next structure in the list. */
1327 struct ipa_known_agg_contents_list *next;
1328 };
1329
1330 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1331 in ARG is filled in with constant values. ARG can either be an aggregate
1332 expression or a pointer to an aggregate. JFUNC is the jump function into
1333 which the constants are subsequently stored. */
1334
1335 static void
1336 determine_known_aggregate_parts (gimple call, tree arg,
1337 struct ipa_jump_func *jfunc)
1338 {
1339 struct ipa_known_agg_contents_list *list = NULL;
1340 int item_count = 0, const_count = 0;
1341 HOST_WIDE_INT arg_offset, arg_size;
1342 gimple_stmt_iterator gsi;
1343 tree arg_base;
1344 bool check_ref, by_ref;
1345 ao_ref r;
1346
1347 /* The function operates in three stages. First, we prepare check_ref, r,
 1348 arg_base and arg_offset based on what is passed as the actual
1349 argument. */
1350
1351 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1352 {
1353 by_ref = true;
1354 if (TREE_CODE (arg) == SSA_NAME)
1355 {
1356 tree type_size;
1357 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
1358 return;
1359 check_ref = true;
1360 arg_base = arg;
1361 arg_offset = 0;
1362 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1363 arg_size = tree_to_uhwi (type_size);
1364 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1365 }
1366 else if (TREE_CODE (arg) == ADDR_EXPR)
1367 {
1368 HOST_WIDE_INT arg_max_size;
1369
1370 arg = TREE_OPERAND (arg, 0);
1371 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1372 &arg_max_size);
1373 if (arg_max_size == -1
1374 || arg_max_size != arg_size
1375 || arg_offset < 0)
1376 return;
1377 if (DECL_P (arg_base))
1378 {
1379 tree size;
1380 check_ref = false;
1381 size = build_int_cst (integer_type_node, arg_size);
1382 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1383 }
1384 else
1385 return;
1386 }
1387 else
1388 return;
1389 }
1390 else
1391 {
1392 HOST_WIDE_INT arg_max_size;
1393
1394 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1395
1396 by_ref = false;
1397 check_ref = false;
1398 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1399 &arg_max_size);
1400 if (arg_max_size == -1
1401 || arg_max_size != arg_size
1402 || arg_offset < 0)
1403 return;
1404
1405 ao_ref_init (&r, arg);
1406 }
1407
1408 /* Second stage walks back the BB, looks at individual statements and as long
1409 as it is confident of how the statements affect contents of the
 1410 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
 1411 structures describing it. */
1412 gsi = gsi_for_stmt (call);
1413 gsi_prev (&gsi);
1414 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1415 {
1416 struct ipa_known_agg_contents_list *n, **p;
1417 gimple stmt = gsi_stmt (gsi);
1418 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1419 tree lhs, rhs, lhs_base;
1420 bool partial_overlap;
1421
1422 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1423 continue;
1424 if (!gimple_assign_single_p (stmt))
1425 break;
1426
1427 lhs = gimple_assign_lhs (stmt);
1428 rhs = gimple_assign_rhs1 (stmt);
1429 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1430 || TREE_CODE (lhs) == BIT_FIELD_REF
1431 || contains_bitfld_component_ref_p (lhs))
1432 break;
1433
1434 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1435 &lhs_max_size);
1436 if (lhs_max_size == -1
1437 || lhs_max_size != lhs_size
1438 || (lhs_offset < arg_offset
1439 && lhs_offset + lhs_size > arg_offset)
1440 || (lhs_offset < arg_offset + arg_size
1441 && lhs_offset + lhs_size > arg_offset + arg_size))
1442 break;
1443
1444 if (check_ref)
1445 {
1446 if (TREE_CODE (lhs_base) != MEM_REF
1447 || TREE_OPERAND (lhs_base, 0) != arg_base
1448 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1449 break;
1450 }
1451 else if (lhs_base != arg_base)
1452 {
1453 if (DECL_P (lhs_base))
1454 continue;
1455 else
1456 break;
1457 }
1458
1459 if (lhs_offset + lhs_size < arg_offset
1460 || lhs_offset >= (arg_offset + arg_size))
1461 continue;
1462
1463 partial_overlap = false;
1464 p = &list;
1465 while (*p && (*p)->offset < lhs_offset)
1466 {
1467 if ((*p)->offset + (*p)->size > lhs_offset)
1468 {
1469 partial_overlap = true;
1470 break;
1471 }
1472 p = &(*p)->next;
1473 }
1474 if (partial_overlap)
1475 break;
1476 if (*p && (*p)->offset < lhs_offset + lhs_size)
1477 {
1478 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1479 /* We already know this value is subsequently overwritten with
1480 something else. */
1481 continue;
1482 else
1483 /* Otherwise this is a partial overlap which we cannot
1484 represent. */
1485 break;
1486 }
1487
1488 rhs = get_ssa_def_if_simple_copy (rhs);
1489 n = XALLOCA (struct ipa_known_agg_contents_list);
1490 n->size = lhs_size;
1491 n->offset = lhs_offset;
1492 if (is_gimple_ip_invariant (rhs))
1493 {
1494 n->constant = rhs;
1495 const_count++;
1496 }
1497 else
1498 n->constant = NULL_TREE;
1499 n->next = *p;
1500 *p = n;
1501
1502 item_count++;
1503 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1504 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1505 break;
1506 }
1507
1508 /* Third stage just goes over the list and creates an appropriate vector of
 1509 ipa_agg_jf_item structures out of it, of course only if there are
1510 any known constants to begin with. */
1511
1512 if (const_count)
1513 {
1514 jfunc->agg.by_ref = by_ref;
1515 vec_alloc (jfunc->agg.items, const_count);
1516 while (list)
1517 {
1518 if (list->constant)
1519 {
1520 struct ipa_agg_jf_item item;
1521 item.offset = list->offset - arg_offset;
1522 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1523 item.value = unshare_expr_without_location (list->constant);
1524 jfunc->agg.items->quick_push (item);
1525 }
1526 list = list->next;
1527 }
1528 }
1529 }
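
/* For example (an illustrative fragment), for a call preceded by

     s.i = 1;
     s.d = 2.0;
     bar (&s);

   the jump function of the first argument of bar records that the bits at
   the offsets of fields i and d hold the constants 1 and 2.0 and that the
   aggregate is passed by reference.  */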
1530
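/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, as far as it can be determined, or NULL otherwise.  */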
1531 static tree
1532 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1533 {
1534 int n;
1535 tree type = (e->callee
1536 ? TREE_TYPE (e->callee->decl)
1537 : gimple_call_fntype (e->call_stmt));
1538 tree t = TYPE_ARG_TYPES (type);
1539
1540 for (n = 0; n < i; n++)
1541 {
1542 if (!t)
1543 break;
1544 t = TREE_CHAIN (t);
1545 }
1546 if (t)
1547 return TREE_VALUE (t);
1548 if (!e->callee)
1549 return NULL;
1550 t = DECL_ARGUMENTS (e->callee->decl);
1551 for (n = 0; n < i; n++)
1552 {
1553 if (!t)
1554 return NULL;
1555 t = TREE_CHAIN (t);
1556 }
1557 if (t)
1558 return TREE_TYPE (t);
1559 return NULL;
1560 }
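
/* Note that TYPE_ARG_TYPES can be absent or shorter than I, e.g. for
   unprototyped (K&R style) declarations, which is why the function above
   falls back to walking DECL_ARGUMENTS of the callee.  */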
1561
1562 /* Compute jump function for all arguments of callsite CS and insert the
1563 information in the jump_functions array in the ipa_edge_args corresponding
1564 to this callsite. */
1565
1566 static void
1567 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1568 struct cgraph_edge *cs)
1569 {
1570 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1571 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1572 gimple call = cs->call_stmt;
1573 int n, arg_num = gimple_call_num_args (call);
1574
1575 if (arg_num == 0 || args->jump_functions)
1576 return;
1577 vec_safe_grow_cleared (args->jump_functions, arg_num);
1578
1579 if (gimple_call_internal_p (call))
1580 return;
1581 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1582 return;
1583
1584 for (n = 0; n < arg_num; n++)
1585 {
1586 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1587 tree arg = gimple_call_arg (call, n);
1588 tree param_type = ipa_get_callee_param_type (cs, n);
1589
1590 if (is_gimple_ip_invariant (arg))
1591 ipa_set_jf_constant (jfunc, arg, cs);
1592 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1593 && TREE_CODE (arg) == PARM_DECL)
1594 {
1595 int index = ipa_get_param_decl_index (info, arg);
1596
 1597 gcc_assert (index >= 0);
1598 /* Aggregate passed by value, check for pass-through, otherwise we
1599 will attempt to fill in aggregate contents later in this
 1600 for loop. */
1601 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1602 {
1603 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1604 continue;
1605 }
1606 }
1607 else if (TREE_CODE (arg) == SSA_NAME)
1608 {
1609 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1610 {
1611 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1612 if (index >= 0)
1613 {
1614 bool agg_p, type_p;
1615 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1616 call, arg);
1617 if (param_type && POINTER_TYPE_P (param_type))
1618 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1619 call, jfunc);
1620 else
1621 type_p = false;
1622 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1623 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1624 type_p);
1625 }
1626 }
1627 else
1628 {
1629 gimple stmt = SSA_NAME_DEF_STMT (arg);
1630 if (is_gimple_assign (stmt))
1631 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1632 call, stmt, arg, param_type);
1633 else if (gimple_code (stmt) == GIMPLE_PHI)
1634 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1635 call, stmt, param_type);
1636 }
1637 }
1638 else
1639 compute_known_type_jump_func (arg, jfunc, call,
1640 param_type
1641 && POINTER_TYPE_P (param_type)
1642 ? TREE_TYPE (param_type)
1643 : NULL);
1644
1645 if ((jfunc->type != IPA_JF_PASS_THROUGH
1646 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1647 && (jfunc->type != IPA_JF_ANCESTOR
1648 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1649 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1650 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1651 determine_known_aggregate_parts (call, arg, jfunc);
1652 }
1653 }
1654
1655 /* Compute jump functions for all edges - both direct and indirect - outgoing
1656 from NODE. Also count the actual arguments in the process. */
1657
1658 static void
1659 ipa_compute_jump_functions (struct cgraph_node *node,
1660 struct param_analysis_info *parms_ainfo)
1661 {
1662 struct cgraph_edge *cs;
1663
1664 for (cs = node->callees; cs; cs = cs->next_callee)
1665 {
1666 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1667 NULL);
1668 /* We do not need to bother analyzing calls to unknown
1669 functions unless they may become known during lto/whopr. */
1670 if (!callee->definition && !flag_lto)
1671 continue;
1672 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1673 }
1674
1675 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1676 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1677 }
1678
1679 /* If STMT looks like a statement loading a value from a member pointer formal
1680 parameter, return that parameter and store the offset of the field to
1681 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1682 might be clobbered). If USE_DELTA, then we look for a use of the delta
1683 field rather than the pfn. */
1684
1685 static tree
1686 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1687 HOST_WIDE_INT *offset_p)
1688 {
1689 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1690
1691 if (!gimple_assign_single_p (stmt))
1692 return NULL_TREE;
1693
1694 rhs = gimple_assign_rhs1 (stmt);
1695 if (TREE_CODE (rhs) == COMPONENT_REF)
1696 {
1697 ref_field = TREE_OPERAND (rhs, 1);
1698 rhs = TREE_OPERAND (rhs, 0);
1699 }
1700 else
1701 ref_field = NULL_TREE;
1702 if (TREE_CODE (rhs) != MEM_REF)
1703 return NULL_TREE;
1704 rec = TREE_OPERAND (rhs, 0);
1705 if (TREE_CODE (rec) != ADDR_EXPR)
1706 return NULL_TREE;
1707 rec = TREE_OPERAND (rec, 0);
1708 if (TREE_CODE (rec) != PARM_DECL
1709 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1710 return NULL_TREE;
1711 ref_offset = TREE_OPERAND (rhs, 1);
1712
1713 if (use_delta)
1714 fld = delta_field;
1715 else
1716 fld = ptr_field;
1717 if (offset_p)
1718 *offset_p = int_bit_position (fld);
1719
1720 if (ref_field)
1721 {
1722 if (integer_nonzerop (ref_offset))
1723 return NULL_TREE;
1724 return ref_field == fld ? rec : NULL_TREE;
1725 }
1726 else
1727 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1728 : NULL_TREE;
1729 }
1730
1731 /* Returns true iff T is an SSA_NAME defined by a statement. */
1732
1733 static bool
1734 ipa_is_ssa_with_stmt_def (tree t)
1735 {
1736 if (TREE_CODE (t) == SSA_NAME
1737 && !SSA_NAME_IS_DEFAULT_DEF (t))
1738 return true;
1739 else
1740 return false;
1741 }
1742
1743 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1744 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1745 indirect call graph edge. */
1746
1747 static struct cgraph_edge *
1748 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1749 {
1750 struct cgraph_edge *cs;
1751
1752 cs = cgraph_edge (node, stmt);
1753 cs->indirect_info->param_index = param_index;
1754 cs->indirect_info->agg_contents = 0;
1755 cs->indirect_info->member_ptr = 0;
1756 return cs;
1757 }
1758
1759 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1760 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1761 intermediate information about each formal parameter. Currently it checks
1762 whether the call calls a pointer that is a formal parameter and if so, the
1763 parameter is marked with the called flag and an indirect call graph edge
1764 describing the call is created. This is very simple for ordinary pointers
1765 represented in SSA but not-so-nice when it comes to member pointers. The
1766 ugly part of this function does nothing more than trying to match the
1767 pattern of such a call. An example of such a pattern is the gimple dump
1768 below, the call is on the last line:
1769
1770 <bb 2>:
1771 f$__delta_5 = f.__delta;
1772 f$__pfn_24 = f.__pfn;
1773
1774 or
1775 <bb 2>:
1776 f$__delta_5 = MEM[(struct *)&f];
1777 f$__pfn_24 = MEM[(struct *)&f + 4B];
1778
1779 and a few lines below:
1780
1781 <bb 5>
1782 D.2496_3 = (int) f$__pfn_24;
1783 D.2497_4 = D.2496_3 & 1;
1784 if (D.2497_4 != 0)
1785 goto <bb 3>;
1786 else
1787 goto <bb 4>;
1788
1789 <bb 6>:
1790 D.2500_7 = (unsigned int) f$__delta_5;
1791 D.2501_8 = &S + D.2500_7;
1792 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1793 D.2503_10 = *D.2502_9;
1794 D.2504_12 = f$__pfn_24 + -1;
1795 D.2505_13 = (unsigned int) D.2504_12;
1796 D.2506_14 = D.2503_10 + D.2505_13;
1797 D.2507_15 = *D.2506_14;
1798 iftmp.11_16 = (String:: *) D.2507_15;
1799
1800 <bb 7>:
1801 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1802 D.2500_19 = (unsigned int) f$__delta_5;
1803 D.2508_20 = &S + D.2500_19;
1804 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1805
1806 Such patterns are results of simple calls to a member pointer:
1807
1808 int doprinting (int (MyString::* f)(int) const)
1809 {
1810 MyString S ("somestring");
1811
1812 return (S.*f)(4);
1813 }
1814
1815 The function also looks for called pointers loaded from aggregates passed
1816 by value or by reference. */
1817
1818 static void
1819 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1820 struct ipa_node_params *info,
1821 struct param_analysis_info *parms_ainfo,
1822 gimple call, tree target)
1823 {
1824 gimple def;
1825 tree n1, n2;
1826 gimple d1, d2;
1827 tree rec, rec2, cond;
1828 gimple branch;
1829 int index;
1830 basic_block bb, virt_bb, join;
1831 HOST_WIDE_INT offset;
1832 bool by_ref;
1833
1834 if (SSA_NAME_IS_DEFAULT_DEF (target))
1835 {
1836 tree var = SSA_NAME_VAR (target);
1837 index = ipa_get_param_decl_index (info, var);
1838 if (index >= 0)
1839 ipa_note_param_call (node, index, call);
1840 return;
1841 }
1842
1843 def = SSA_NAME_DEF_STMT (target);
1844 if (gimple_assign_single_p (def)
1845 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1846 gimple_assign_rhs1 (def), &index, &offset,
1847 NULL, &by_ref))
1848 {
1849 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1850 if (cs->indirect_info->offset != offset)
1851 cs->indirect_info->outer_type = NULL;
1852 cs->indirect_info->offset = offset;
1853 cs->indirect_info->agg_contents = 1;
1854 cs->indirect_info->by_ref = by_ref;
1855 return;
1856 }
1857
1858 /* Now we need to try to match the complex pattern of calling a member
1859 pointer. */
1860 if (gimple_code (def) != GIMPLE_PHI
1861 || gimple_phi_num_args (def) != 2
1862 || !POINTER_TYPE_P (TREE_TYPE (target))
1863 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1864 return;
1865
1866 /* First, we need to check whether one of these is a load from a member
1867 pointer that is a parameter to this function. */
1868 n1 = PHI_ARG_DEF (def, 0);
1869 n2 = PHI_ARG_DEF (def, 1);
1870 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1871 return;
1872 d1 = SSA_NAME_DEF_STMT (n1);
1873 d2 = SSA_NAME_DEF_STMT (n2);
1874
1875 join = gimple_bb (def);
1876 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1877 {
1878 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1879 return;
1880
1881 bb = EDGE_PRED (join, 0)->src;
1882 virt_bb = gimple_bb (d2);
1883 }
1884 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1885 {
1886 bb = EDGE_PRED (join, 1)->src;
1887 virt_bb = gimple_bb (d1);
1888 }
1889 else
1890 return;
1891
1892 /* Second, we need to check that the basic blocks are laid out in the way
1893 corresponding to the pattern. */
1894
1895 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1896 || single_pred (virt_bb) != bb
1897 || single_succ (virt_bb) != join)
1898 return;
1899
1900 /* Third, let's see that the branching is done depending on the least
1901 significant bit of the pfn. */
1902
1903 branch = last_stmt (bb);
1904 if (!branch || gimple_code (branch) != GIMPLE_COND)
1905 return;
1906
1907 if ((gimple_cond_code (branch) != NE_EXPR
1908 && gimple_cond_code (branch) != EQ_EXPR)
1909 || !integer_zerop (gimple_cond_rhs (branch)))
1910 return;
1911
1912 cond = gimple_cond_lhs (branch);
1913 if (!ipa_is_ssa_with_stmt_def (cond))
1914 return;
1915
1916 def = SSA_NAME_DEF_STMT (cond);
1917 if (!is_gimple_assign (def)
1918 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1919 || !integer_onep (gimple_assign_rhs2 (def)))
1920 return;
1921
1922 cond = gimple_assign_rhs1 (def);
1923 if (!ipa_is_ssa_with_stmt_def (cond))
1924 return;
1925
1926 def = SSA_NAME_DEF_STMT (cond);
1927
1928 if (is_gimple_assign (def)
1929 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1930 {
1931 cond = gimple_assign_rhs1 (def);
1932 if (!ipa_is_ssa_with_stmt_def (cond))
1933 return;
1934 def = SSA_NAME_DEF_STMT (cond);
1935 }
1936
1937 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1938 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1939 == ptrmemfunc_vbit_in_delta),
1940 NULL);
1941 if (rec != rec2)
1942 return;
1943
1944 index = ipa_get_param_decl_index (info, rec);
1945 if (index >= 0
1946 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1947 {
1948 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1949 if (cs->indirect_info->offset != offset)
1950 cs->indirect_info->outer_type = NULL;
1951 cs->indirect_info->offset = offset;
1952 cs->indirect_info->agg_contents = 1;
1953 cs->indirect_info->member_ptr = 1;
1954 }
1955
1956 return;
1957 }
1958
1959 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1960 object referenced in the expression is a formal parameter of the caller
1961 (described by INFO), create a call note for the statement. */
1962
1963 static void
1964 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1965 struct ipa_node_params *info, gimple call,
1966 tree target)
1967 {
1968 struct cgraph_edge *cs;
1969 struct cgraph_indirect_call_info *ii;
1970 struct ipa_jump_func jfunc;
1971 tree obj = OBJ_TYPE_REF_OBJECT (target);
1972 int index;
1973 HOST_WIDE_INT anc_offset;
1974
1975 if (!flag_devirtualize)
1976 return;
1977
1978 if (TREE_CODE (obj) != SSA_NAME)
1979 return;
1980
1981 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1982 {
1983 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1984 return;
1985
1986 anc_offset = 0;
1987 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1988 gcc_assert (index >= 0);
1989 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1990 call, &jfunc))
1991 return;
1992 }
1993 else
1994 {
1995 gimple stmt = SSA_NAME_DEF_STMT (obj);
1996 tree expr;
1997
1998 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1999 if (!expr)
2000 return;
2001 index = ipa_get_param_decl_index (info,
2002 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2003 gcc_assert (index >= 0);
2004 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2005 call, &jfunc, anc_offset))
2006 return;
2007 }
2008
2009 cs = ipa_note_param_call (node, index, call);
2010 ii = cs->indirect_info;
2011 ii->offset = anc_offset;
2012 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2013 ii->otr_type = obj_type_ref_class (target);
2014 ii->polymorphic = 1;
2015 }
2016
2017 /* Analyze call statement CALL to see whether and how it uses formal
2018 parameters of the caller (described by INFO). PARMS_AINFO points to a
2019 vector containing intermediate information about each formal parameter. */
2020
2021 static void
2022 ipa_analyze_call_uses (struct cgraph_node *node,
2023 struct ipa_node_params *info,
2024 struct param_analysis_info *parms_ainfo, gimple call)
2025 {
2026 tree target = gimple_call_fn (call);
2027 struct cgraph_edge *cs;
2028
2029 if (!target
2030 || (TREE_CODE (target) != SSA_NAME
2031 && !virtual_method_call_p (target)))
2032 return;
2033
2034 /* If we previously turned the call into a direct call, there is
2035 no need to analyze it. */
2036 cs = cgraph_edge (node, call);
2037 if (cs && !cs->indirect_unknown_callee)
2038 return;
2039 if (TREE_CODE (target) == SSA_NAME)
2040 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2041 else if (virtual_method_call_p (target))
2042 ipa_analyze_virtual_call_uses (node, info, call, target);
2043 }
2044
2045
2046 /* Analyze the call statement STMT with respect to formal parameters (described
2047 in INFO) of the caller given by NODE. Currently it only checks whether formal
2048 parameters are called. PARMS_AINFO is a pointer to a vector containing
2049 intermediate information about each formal parameter. */
2050
2051 static void
2052 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2053 struct param_analysis_info *parms_ainfo, gimple stmt)
2054 {
2055 if (is_gimple_call (stmt))
2056 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2057 }
2058
2059 /* Callback of walk_stmt_load_store_addr_ops used to visit loads, stores and
2060 addresses. If OP is a parameter declaration, mark it as used in the info
2061 structure passed in DATA. */
2062
2063 static bool
2064 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2065 {
2066 struct ipa_node_params *info = (struct ipa_node_params *) data;
2067
2068 op = get_base_address (op);
2069 if (op
2070 && TREE_CODE (op) == PARM_DECL)
2071 {
2072 int index = ipa_get_param_decl_index (info, op);
2073 gcc_assert (index >= 0);
2074 ipa_set_param_used (info, index, true);
2075 }
2076
2077 return false;
2078 }
2079
2080 /* Scan the function body of NODE and inspect the uses of formal parameters.
2081 Store the findings in various structures of the associated ipa_node_params
2082 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2083 vector containing intermediate information about each formal parameter. */
2084
2085 static void
2086 ipa_analyze_params_uses (struct cgraph_node *node,
2087 struct param_analysis_info *parms_ainfo)
2088 {
2089 tree decl = node->decl;
2090 basic_block bb;
2091 struct function *func;
2092 gimple_stmt_iterator gsi;
2093 struct ipa_node_params *info = IPA_NODE_REF (node);
2094 int i;
2095
2096 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2097 return;
2098
2099 info->uses_analysis_done = 1;
2100 if (ipa_func_spec_opts_forbid_analysis_p (node))
2101 {
2102 for (i = 0; i < ipa_get_param_count (info); i++)
2103 {
2104 ipa_set_param_used (info, i, true);
2105 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2106 }
2107 return;
2108 }
2109
2110 for (i = 0; i < ipa_get_param_count (info); i++)
2111 {
2112 tree parm = ipa_get_param (info, i);
2113 int controlled_uses = 0;
2114
2115 /* For SSA regs see if the parameter is used. For non-SSA we compute
2116 the flag during modification analysis. */
2117 if (is_gimple_reg (parm))
2118 {
2119 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2120 parm);
2121 if (ddef && !has_zero_uses (ddef))
2122 {
2123 imm_use_iterator imm_iter;
2124 use_operand_p use_p;
2125
2126 ipa_set_param_used (info, i, true);
2127 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2128 if (!is_gimple_call (USE_STMT (use_p)))
2129 {
2130 if (!is_gimple_debug (USE_STMT (use_p)))
2131 {
2132 controlled_uses = IPA_UNDESCRIBED_USE;
2133 break;
2134 }
2135 }
2136 else
2137 controlled_uses++;
2138 }
2139 else
2140 controlled_uses = 0;
2141 }
2142 else
2143 controlled_uses = IPA_UNDESCRIBED_USE;
2144 ipa_set_controlled_uses (info, i, controlled_uses);
2145 }
2146
2147 func = DECL_STRUCT_FUNCTION (decl);
2148 FOR_EACH_BB_FN (bb, func)
2149 {
2150 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2151 {
2152 gimple stmt = gsi_stmt (gsi);
2153
2154 if (is_gimple_debug (stmt))
2155 continue;
2156
2157 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2158 walk_stmt_load_store_addr_ops (stmt, info,
2159 visit_ref_for_mod_analysis,
2160 visit_ref_for_mod_analysis,
2161 visit_ref_for_mod_analysis);
2162 }
2163 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2164 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2165 visit_ref_for_mod_analysis,
2166 visit_ref_for_mod_analysis,
2167 visit_ref_for_mod_analysis);
2168 }
2169 }
2170
2171 /* Free the bitmaps in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2172
2173 static void
2174 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2175 {
2176 int i;
2177
2178 for (i = 0; i < param_count; i++)
2179 {
2180 if (parms_ainfo[i].parm_visited_statements)
2181 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2182 if (parms_ainfo[i].pt_visited_statements)
2183 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2184 }
2185 }
2186
2187 /* Initialize the array describing properties of formal parameters
2188 of NODE, analyze their uses and compute jump functions associated
2189 with actual arguments of calls from within NODE. */
2190
2191 void
2192 ipa_analyze_node (struct cgraph_node *node)
2193 {
2194 struct ipa_node_params *info;
2195 struct param_analysis_info *parms_ainfo;
2196 int param_count;
2197
2198 ipa_check_create_node_params ();
2199 ipa_check_create_edge_args ();
2200 info = IPA_NODE_REF (node);
2201 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2202 ipa_initialize_node_params (node);
2203
2204 param_count = ipa_get_param_count (info);
2205 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2206 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2207
2208 ipa_analyze_params_uses (node, parms_ainfo);
2209 ipa_compute_jump_functions (node, parms_ainfo);
2210
2211 free_parms_ainfo (parms_ainfo, param_count);
2212 pop_cfun ();
2213 }
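/* A minimal usage sketch (a hypothetical driver loop; it mirrors what
   ipa_add_new_function further below does for a single node):

     struct cgraph_node *node;

     FOR_EACH_DEFINED_FUNCTION (node)
       if (cgraph_function_with_gimple_body_p (node))
         ipa_analyze_node (node);
*/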
2214
2215 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2216 attempt a type-based devirtualization. If successful, return the
2217 target function declaration, otherwise return NULL. */
2218
2219 tree
2220 ipa_intraprocedural_devirtualization (gimple call)
2221 {
2222 tree binfo, token, fndecl;
2223 struct ipa_jump_func jfunc;
2224 tree otr = gimple_call_fn (call);
2225
2226 jfunc.type = IPA_JF_UNKNOWN;
2227 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2228 call, obj_type_ref_class (otr));
2229 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2230 return NULL_TREE;
2231 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2232 if (!binfo)
2233 return NULL_TREE;
2234 token = OBJ_TYPE_REF_TOKEN (otr);
2235 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2236 binfo);
2237 #ifdef ENABLE_CHECKING
2238 if (fndecl)
2239 gcc_assert (possible_polymorphic_call_target_p
2240 (otr, cgraph_get_node (fndecl)));
2241 #endif
2242 return fndecl;
2243 }
2244
2245 /* Update the jump function DST when the call graph edge corresponding to SRC
2246 is being inlined, knowing that DST is of type ancestor and SRC of known
2247 type. */
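/* For example (offsets purely illustrative): if SRC describes a known
   object of type B at offset 32 and DST walks to an ancestor at offset 64
   within its operand, the combined jump function describes a known type at
   offset 32 + 64 = 96, provided DST preserves the type.  */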
2248
2249 static void
2250 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2251 struct ipa_jump_func *dst)
2252 {
2253 HOST_WIDE_INT combined_offset;
2254 tree combined_type;
2255
2256 if (!ipa_get_jf_ancestor_type_preserved (dst))
2257 {
2258 dst->type = IPA_JF_UNKNOWN;
2259 return;
2260 }
2261
2262 combined_offset = ipa_get_jf_known_type_offset (src)
2263 + ipa_get_jf_ancestor_offset (dst);
2264 combined_type = ipa_get_jf_ancestor_type (dst);
2265
2266 ipa_set_jf_known_type (dst, combined_offset,
2267 ipa_get_jf_known_type_base_type (src),
2268 combined_type);
2269 }
2270
2271 /* Update the jump functions associated with call graph edge E when the call
2272 graph edge CS is being inlined, assuming that E->caller is already (possibly
2273 indirectly) inlined into CS->callee and that E has not been inlined. */
2274
2275 static void
2276 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2277 struct cgraph_edge *e)
2278 {
2279 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2280 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2281 int count = ipa_get_cs_argument_count (args);
2282 int i;
2283
2284 for (i = 0; i < count; i++)
2285 {
2286 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2287
2288 if (dst->type == IPA_JF_ANCESTOR)
2289 {
2290 struct ipa_jump_func *src;
2291 int dst_fid = dst->value.ancestor.formal_id;
2292
2293 /* A variable number of arguments can cause havoc if we try to access
2294 one that does not exist in the inlined edge. So make sure we
2295 don't. */
2296 if (dst_fid >= ipa_get_cs_argument_count (top))
2297 {
2298 dst->type = IPA_JF_UNKNOWN;
2299 continue;
2300 }
2301
2302 src = ipa_get_ith_jump_func (top, dst_fid);
2303
2304 if (src->agg.items
2305 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2306 {
2307 struct ipa_agg_jf_item *item;
2308 int j;
2309
2310 /* Currently we do not produce clobber aggregate jump functions;
2311 replace this copying with merging once we do. */
2312 gcc_assert (!dst->agg.items);
2313
2314 dst->agg.items = vec_safe_copy (src->agg.items);
2315 dst->agg.by_ref = src->agg.by_ref;
2316 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2317 item->offset -= dst->value.ancestor.offset;
2318 }
2319
2320 if (src->type == IPA_JF_KNOWN_TYPE)
2321 combine_known_type_and_ancestor_jfs (src, dst);
2322 else if (src->type == IPA_JF_PASS_THROUGH
2323 && src->value.pass_through.operation == NOP_EXPR)
2324 {
2325 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2326 dst->value.ancestor.agg_preserved &=
2327 src->value.pass_through.agg_preserved;
2328 dst->value.ancestor.type_preserved &=
2329 src->value.pass_through.type_preserved;
2330 }
2331 else if (src->type == IPA_JF_ANCESTOR)
2332 {
2333 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2334 dst->value.ancestor.offset += src->value.ancestor.offset;
2335 dst->value.ancestor.agg_preserved &=
2336 src->value.ancestor.agg_preserved;
2337 dst->value.ancestor.type_preserved &=
2338 src->value.ancestor.type_preserved;
2339 }
2340 else
2341 dst->type = IPA_JF_UNKNOWN;
2342 }
2343 else if (dst->type == IPA_JF_PASS_THROUGH)
2344 {
2345 struct ipa_jump_func *src;
2346 /* We must check the range due to calls with a variable number of arguments,
2347 and we cannot combine jump functions with operations. */
2348 if (dst->value.pass_through.operation == NOP_EXPR
2349 && (dst->value.pass_through.formal_id
2350 < ipa_get_cs_argument_count (top)))
2351 {
2352 int dst_fid = dst->value.pass_through.formal_id;
2353 src = ipa_get_ith_jump_func (top, dst_fid);
2354 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2355
2356 switch (src->type)
2357 {
2358 case IPA_JF_UNKNOWN:
2359 dst->type = IPA_JF_UNKNOWN;
2360 break;
2361 case IPA_JF_KNOWN_TYPE:
2362 if (ipa_get_jf_pass_through_type_preserved (dst))
2363 ipa_set_jf_known_type (dst,
2364 ipa_get_jf_known_type_offset (src),
2365 ipa_get_jf_known_type_base_type (src),
2366 ipa_get_jf_known_type_base_type (src));
2367 else
2368 dst->type = IPA_JF_UNKNOWN;
2369 break;
2370 case IPA_JF_CONST:
2371 ipa_set_jf_cst_copy (dst, src);
2372 break;
2373
2374 case IPA_JF_PASS_THROUGH:
2375 {
2376 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2377 enum tree_code operation;
2378 operation = ipa_get_jf_pass_through_operation (src);
2379
2380 if (operation == NOP_EXPR)
2381 {
2382 bool agg_p, type_p;
2383 agg_p = dst_agg_p
2384 && ipa_get_jf_pass_through_agg_preserved (src);
2385 type_p = ipa_get_jf_pass_through_type_preserved (src)
2386 && ipa_get_jf_pass_through_type_preserved (dst);
2387 ipa_set_jf_simple_pass_through (dst, formal_id,
2388 agg_p, type_p);
2389 }
2390 else
2391 {
2392 tree operand = ipa_get_jf_pass_through_operand (src);
2393 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2394 operation);
2395 }
2396 break;
2397 }
2398 case IPA_JF_ANCESTOR:
2399 {
2400 bool agg_p, type_p;
2401 agg_p = dst_agg_p
2402 && ipa_get_jf_ancestor_agg_preserved (src);
2403 type_p = ipa_get_jf_ancestor_type_preserved (src)
2404 && ipa_get_jf_pass_through_type_preserved (dst);
2405 ipa_set_ancestor_jf (dst,
2406 ipa_get_jf_ancestor_offset (src),
2407 ipa_get_jf_ancestor_type (src),
2408 ipa_get_jf_ancestor_formal_id (src),
2409 agg_p, type_p);
2410 break;
2411 }
2412 default:
2413 gcc_unreachable ();
2414 }
2415
2416 if (src->agg.items
2417 && (dst_agg_p || !src->agg.by_ref))
2418 {
2419 /* Currently we do not produce clobber aggregate jump
2420 functions; replace this copying with merging once we do. */
2421 gcc_assert (!dst->agg.items);
2422
2423 dst->agg.by_ref = src->agg.by_ref;
2424 dst->agg.items = vec_safe_copy (src->agg.items);
2425 }
2426 }
2427 else
2428 dst->type = IPA_JF_UNKNOWN;
2429 }
2430 }
2431 }
2432
2433 /* If TARGET is an addr_expr of a function declaration, make it the destination
2434 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2435
2436 struct cgraph_edge *
2437 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2438 {
2439 struct cgraph_node *callee;
2440 struct inline_edge_summary *es = inline_edge_summary (ie);
2441 bool unreachable = false;
2442
2443 if (TREE_CODE (target) == ADDR_EXPR)
2444 target = TREE_OPERAND (target, 0);
2445 if (TREE_CODE (target) != FUNCTION_DECL)
2446 {
2447 target = canonicalize_constructor_val (target, NULL);
2448 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2449 {
2450 if (ie->indirect_info->member_ptr)
2451 /* Member pointer call that goes through a VMT lookup. */
2452 return NULL;
2453
2454 if (dump_file)
2455 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2456 " in %s/%i, making it unreachable.\n",
2457 ie->caller->name (), ie->caller->order);
2458 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2459 callee = cgraph_get_create_node (target);
2460 unreachable = true;
2461 }
2462 else
2463 callee = cgraph_get_node (target);
2464 }
2465 else
2466 callee = cgraph_get_node (target);
2467
2468 /* Because may-edges are not explicitly represented and the vtable may be
2469 external, we may create the first reference to the object in the unit. */
2470 if (!callee || callee->global.inlined_to)
2471 {
2472
2473 /* We had better make sure we can refer to it.
2474 In the case of static functions we are out of luck, since we already
2475 removed their bodies. In the case of public functions we may or may
2476 not introduce the reference. */
2477 if (!canonicalize_constructor_val (target, NULL)
2478 || !TREE_PUBLIC (target))
2479 {
2480 if (dump_file)
2481 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2482 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2483 xstrdup (ie->caller->name ()),
2484 ie->caller->order,
2485 xstrdup (ie->callee->name ()),
2486 ie->callee->order);
2487 return NULL;
2488 }
2489 callee = cgraph_get_create_node (target);
2490 }
2491 ipa_check_create_node_params ();
2492
2493 /* We cannot make edges to inline clones. It is a bug that someone removed
2494 the cgraph node too early. */
2495 gcc_assert (!callee->global.inlined_to);
2496
2497 if (dump_file && !unreachable)
2498 {
2499 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2500 "(%s/%i -> %s/%i), for stmt ",
2501 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2502 xstrdup (ie->caller->name ()),
2503 ie->caller->order,
2504 xstrdup (callee->name ()),
2505 callee->order);
2506 if (ie->call_stmt)
2507 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2508 else
2509 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2510 }
2511 ie = cgraph_make_edge_direct (ie, callee);
2512 es = inline_edge_summary (ie);
2513 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2514 - eni_size_weights.call_cost);
2515 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2516 - eni_time_weights.call_cost);
2517
2518 return ie;
2519 }
2520
2521 /* Retrieve the value from aggregate jump function AGG at the given OFFSET, or
2522 return NULL if there is none. BY_REF specifies whether the value has to
2523 be passed by reference or by value. */
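/* A hypothetical lookup of a value stored by value at byte 8 of the
   aggregate; note that OFFSET is counted in bits:

     tree val = ipa_find_agg_cst_for_param (&jfunc->agg,
                                            8 * BITS_PER_UNIT, false);
*/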
2524
2525 tree
2526 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2527 HOST_WIDE_INT offset, bool by_ref)
2528 {
2529 struct ipa_agg_jf_item *item;
2530 int i;
2531
2532 if (by_ref != agg->by_ref)
2533 return NULL;
2534
2535 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2536 if (item->offset == offset)
2537 {
2538 /* Currently we do not have clobber values; return NULL for them once
2539 we do. */
2540 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2541 return item->value;
2542 }
2543 return NULL;
2544 }
2545
2546 /* Remove a reference to SYMBOL from the list of references of a node given by
2547 reference description RDESC. Return true if the reference has been
2548 successfully found and removed. */
2549
2550 static bool
2551 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2552 {
2553 struct ipa_ref *to_del;
2554 struct cgraph_edge *origin;
2555
2556 origin = rdesc->cs;
2557 if (!origin)
2558 return false;
2559 to_del = ipa_find_reference (origin->caller, symbol,
2560 origin->call_stmt, origin->lto_stmt_uid);
2561 if (!to_del)
2562 return false;
2563
2564 ipa_remove_reference (to_del);
2565 if (dump_file)
2566 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2567 xstrdup (origin->caller->name ()),
2568 origin->caller->order, xstrdup (symbol->name ()));
2569 return true;
2570 }
2571
2572 /* If JFUNC has a reference description with refcount different from
2573 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2574 NULL. JFUNC must be a constant jump function. */
2575
2576 static struct ipa_cst_ref_desc *
2577 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2578 {
2579 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2580 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2581 return rdesc;
2582 else
2583 return NULL;
2584 }
2585
2586 /* If the value of constant jump function JFUNC is an address of a function
2587 declaration, return the associated call graph node. Otherwise return
2588 NULL. */
2589
2590 static cgraph_node *
2591 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2592 {
2593 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2594 tree cst = ipa_get_jf_constant (jfunc);
2595 if (TREE_CODE (cst) != ADDR_EXPR
2596 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2597 return NULL;
2598
2599 return cgraph_get_node (TREE_OPERAND (cst, 0));
2600 }
2601
2602
2603 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2604 refcount and if it hits zero, remove the reference to the described symbol
2605 from the caller of the edge specified in the rdesc. Return false if either
2606 the symbol or the reference could not be found, otherwise return true. */
2607
2608 static bool
2609 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2610 {
2611 struct ipa_cst_ref_desc *rdesc;
2612 if (jfunc->type == IPA_JF_CONST
2613 && (rdesc = jfunc_rdesc_usable (jfunc))
2614 && --rdesc->refcount == 0)
2615 {
2616 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2617 if (!symbol)
2618 return false;
2619
2620 return remove_described_reference (symbol, rdesc);
2621 }
2622 return true;
2623 }
2624
2625 /* Try to find a destination for indirect edge IE that corresponds to a simple
2626 call or a call of a member function pointer and where the destination is a
2627 pointer formal parameter described by jump function JFUNC. If it can be
2628 determined, return the newly direct edge, otherwise return NULL.
2629 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2630
2631 static struct cgraph_edge *
2632 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2633 struct ipa_jump_func *jfunc,
2634 struct ipa_node_params *new_root_info)
2635 {
2636 struct cgraph_edge *cs;
2637 tree target;
2638 bool agg_contents = ie->indirect_info->agg_contents;
2639
2640 if (ie->indirect_info->agg_contents)
2641 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2642 ie->indirect_info->offset,
2643 ie->indirect_info->by_ref);
2644 else
2645 target = ipa_value_from_jfunc (new_root_info, jfunc);
2646 if (!target)
2647 return NULL;
2648 cs = ipa_make_edge_direct_to_target (ie, target);
2649
2650 if (cs && !agg_contents)
2651 {
2652 bool ok;
2653 gcc_checking_assert (cs->callee
2654 && (cs != ie
2655 || jfunc->type != IPA_JF_CONST
2656 || !cgraph_node_for_jfunc (jfunc)
2657 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2658 ok = try_decrement_rdesc_refcount (jfunc);
2659 gcc_checking_assert (ok);
2660 }
2661
2662 return cs;
2663 }
2664
2665 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2666 call based on a formal parameter which is described by jump function JFUNC
2667 and if it can be determined, make it direct and return the direct edge.
2668 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2669 are relative to. */
2670
2671 static struct cgraph_edge *
2672 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2673 struct ipa_jump_func *jfunc,
2674 struct ipa_node_params *new_root_info)
2675 {
2676 tree binfo, target;
2677
2678 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2679
2680 if (!binfo)
2681 return NULL;
2682
2683 if (TREE_CODE (binfo) != TREE_BINFO)
2684 {
2685 binfo = gimple_extract_devirt_binfo_from_cst
2686 (binfo, ie->indirect_info->otr_type);
2687 if (!binfo)
2688 return NULL;
2689 }
2690
2691 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2692 ie->indirect_info->otr_type);
2693 if (binfo)
2694 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2695 binfo);
2696 else
2697 return NULL;
2698
2699 if (target)
2700 {
2701 #ifdef ENABLE_CHECKING
2702 gcc_assert (possible_polymorphic_call_target_p
2703 (ie, cgraph_get_node (target)));
2704 #endif
2705 return ipa_make_edge_direct_to_target (ie, target);
2706 }
2707 else
2708 return NULL;
2709 }
2710
2711 /* Update the param called notes associated with NODE when CS is being inlined,
2712 assuming NODE is (potentially indirectly) inlined into CS->callee.
2713 Moreover, if the callee is discovered to be constant, create a new cgraph
2714 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2715 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2716
2717 static bool
2718 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2719 struct cgraph_node *node,
2720 vec<cgraph_edge_p> *new_edges)
2721 {
2722 struct ipa_edge_args *top;
2723 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2724 struct ipa_node_params *new_root_info;
2725 bool res = false;
2726
2727 ipa_check_create_edge_args ();
2728 top = IPA_EDGE_REF (cs);
2729 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2730 ? cs->caller->global.inlined_to
2731 : cs->caller);
2732
2733 for (ie = node->indirect_calls; ie; ie = next_ie)
2734 {
2735 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2736 struct ipa_jump_func *jfunc;
2737 int param_index;
2738
2739 next_ie = ie->next_callee;
2740
2741 if (ici->param_index == -1)
2742 continue;
2743
2744 /* We must check the range due to calls with a variable number of arguments. */
2745 if (ici->param_index >= ipa_get_cs_argument_count (top))
2746 {
2747 ici->param_index = -1;
2748 continue;
2749 }
2750
2751 param_index = ici->param_index;
2752 jfunc = ipa_get_ith_jump_func (top, param_index);
2753
2754 if (!flag_indirect_inlining)
2755 new_direct_edge = NULL;
2756 else if (ici->polymorphic)
2757 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2758 new_root_info);
2759 else
2760 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2761 new_root_info);
2762 /* If speculation was removed, then we need to do nothing. */
2763 if (new_direct_edge && new_direct_edge != ie)
2764 {
2765 new_direct_edge->indirect_inlining_edge = 1;
2766 top = IPA_EDGE_REF (cs);
2767 res = true;
2768 }
2769 else if (new_direct_edge)
2770 {
2771 new_direct_edge->indirect_inlining_edge = 1;
2772 if (new_direct_edge->call_stmt)
2773 new_direct_edge->call_stmt_cannot_inline_p
2774 = !gimple_check_call_matching_types (
2775 new_direct_edge->call_stmt,
2776 new_direct_edge->callee->decl, false);
2777 if (new_edges)
2778 {
2779 new_edges->safe_push (new_direct_edge);
2780 res = true;
2781 }
2782 top = IPA_EDGE_REF (cs);
2783 }
2784 else if (jfunc->type == IPA_JF_PASS_THROUGH
2785 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2786 {
2787 if (ici->agg_contents
2788 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2789 ici->param_index = -1;
2790 else
2791 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2792 }
2793 else if (jfunc->type == IPA_JF_ANCESTOR)
2794 {
2795 if (ici->agg_contents
2796 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2797 ici->param_index = -1;
2798 else
2799 {
2800 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2801 if (ipa_get_jf_ancestor_offset (jfunc))
2802 ici->outer_type = NULL;
2803 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2804 }
2805 }
2806 else
2807 /* Either we can find a destination for this edge now or never. */
2808 ici->param_index = -1;
2809 }
2810
2811 return res;
2812 }
2813
2814 /* Recursively traverse subtree of NODE (including node) made of inlined
2815 cgraph_edges when CS has been inlined and invoke
2816 update_indirect_edges_after_inlining on all nodes and
2817 update_jump_functions_after_inlining on all non-inlined edges that lead out
2818 of this subtree. Newly discovered indirect edges will be added to
2819 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2820 created. */
2821
2822 static bool
2823 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2824 struct cgraph_node *node,
2825 vec<cgraph_edge_p> *new_edges)
2826 {
2827 struct cgraph_edge *e;
2828 bool res;
2829
2830 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2831
2832 for (e = node->callees; e; e = e->next_callee)
2833 if (!e->inline_failed)
2834 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2835 else
2836 update_jump_functions_after_inlining (cs, e);
2837 for (e = node->indirect_calls; e; e = e->next_callee)
2838 update_jump_functions_after_inlining (cs, e);
2839
2840 return res;
2841 }
2842
2843 /* Combine two controlled uses counts as done during inlining. */
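/* The call being inlined was itself one of the C described uses in the
   caller and is now replaced by the D uses in the callee's body, hence
   c + d - 1; e.g. combining counts 2 and 3 yields 4.  Combining anything
   with IPA_UNDESCRIBED_USE yields IPA_UNDESCRIBED_USE.  */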
2844
2845 static int
2846 combine_controlled_uses_counters (int c, int d)
2847 {
2848 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2849 return IPA_UNDESCRIBED_USE;
2850 else
2851 return c + d - 1;
2852 }
2853
2854 /* Propagate the number of controlled uses from CS->callee to the new root of
2855 the tree of inlined nodes. */
2856
2857 static void
2858 propagate_controlled_uses (struct cgraph_edge *cs)
2859 {
2860 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2861 struct cgraph_node *new_root = cs->caller->global.inlined_to
2862 ? cs->caller->global.inlined_to : cs->caller;
2863 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2864 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2865 int count, i;
2866
2867 count = MIN (ipa_get_cs_argument_count (args),
2868 ipa_get_param_count (old_root_info));
2869 for (i = 0; i < count; i++)
2870 {
2871 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2872 struct ipa_cst_ref_desc *rdesc;
2873
2874 if (jf->type == IPA_JF_PASS_THROUGH)
2875 {
2876 int src_idx, c, d;
2877 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2878 c = ipa_get_controlled_uses (new_root_info, src_idx);
2879 d = ipa_get_controlled_uses (old_root_info, i);
2880
2881 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2882 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2883 c = combine_controlled_uses_counters (c, d);
2884 ipa_set_controlled_uses (new_root_info, src_idx, c);
2885 if (c == 0 && new_root_info->ipcp_orig_node)
2886 {
2887 struct cgraph_node *n;
2888 struct ipa_ref *ref;
2889 tree t = new_root_info->known_vals[src_idx];
2890
2891 if (t && TREE_CODE (t) == ADDR_EXPR
2892 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2893 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2894 && (ref = ipa_find_reference (new_root,
2895 n, NULL, 0)))
2896 {
2897 if (dump_file)
2898 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2899 "reference from %s/%i to %s/%i.\n",
2900 xstrdup (new_root->name ()),
2901 new_root->order,
2902 xstrdup (n->name ()), n->order);
2903 ipa_remove_reference (ref);
2904 }
2905 }
2906 }
2907 else if (jf->type == IPA_JF_CONST
2908 && (rdesc = jfunc_rdesc_usable (jf)))
2909 {
2910 int d = ipa_get_controlled_uses (old_root_info, i);
2911 int c = rdesc->refcount;
2912 rdesc->refcount = combine_controlled_uses_counters (c, d);
2913 if (rdesc->refcount == 0)
2914 {
2915 tree cst = ipa_get_jf_constant (jf);
2916 struct cgraph_node *n;
2917 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2918 && TREE_CODE (TREE_OPERAND (cst, 0))
2919 == FUNCTION_DECL);
2920 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2921 if (n)
2922 {
2923 struct cgraph_node *clone;
2924 bool ok;
2925 ok = remove_described_reference (n, rdesc);
2926 gcc_checking_assert (ok);
2927
2928 clone = cs->caller;
2929 while (clone->global.inlined_to
2930 && clone != rdesc->cs->caller
2931 && IPA_NODE_REF (clone)->ipcp_orig_node)
2932 {
2933 struct ipa_ref *ref;
2934 ref = ipa_find_reference (clone,
2935 n, NULL, 0);
2936 if (ref)
2937 {
2938 if (dump_file)
2939 fprintf (dump_file, "ipa-prop: Removing "
2940 "cloning-created reference "
2941 "from %s/%i to %s/%i.\n",
2942 xstrdup (clone->name ()),
2943 clone->order,
2944 xstrdup (n->name ()),
2945 n->order);
2946 ipa_remove_reference (ref);
2947 }
2948 clone = clone->callers->caller;
2949 }
2950 }
2951 }
2952 }
2953 }
2954
2955 for (i = ipa_get_param_count (old_root_info);
2956 i < ipa_get_cs_argument_count (args);
2957 i++)
2958 {
2959 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2960
2961 if (jf->type == IPA_JF_CONST)
2962 {
2963 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2964 if (rdesc)
2965 rdesc->refcount = IPA_UNDESCRIBED_USE;
2966 }
2967 else if (jf->type == IPA_JF_PASS_THROUGH)
2968 ipa_set_controlled_uses (new_root_info,
2969 jf->value.pass_through.formal_id,
2970 IPA_UNDESCRIBED_USE);
2971 }
2972 }
2973
2974 /* Update jump functions and call note functions on inlining the call site CS.
2975 CS is expected to lead to a node already cloned by
2976 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2977 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2978 created. */
2979
2980 bool
2981 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2982 vec<cgraph_edge_p> *new_edges)
2983 {
2984 bool changed;
2985 /* Do nothing if the preparation phase has not been carried out yet
2986 (i.e. during early inlining). */
2987 if (!ipa_node_params_vector.exists ())
2988 return false;
2989 gcc_assert (ipa_edge_args_vector);
2990
2991 propagate_controlled_uses (cs);
2992 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2993
2994 return changed;
2995 }
2996
2997 /* Frees all dynamically allocated structures that the argument info points
2998 to. */
2999
3000 void
3001 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3002 {
3003 vec_free (args->jump_functions);
3004 memset (args, 0, sizeof (*args));
3005 }
3006
3007 /* Free all ipa_edge_args structures. */
3008
3009 void
3010 ipa_free_all_edge_args (void)
3011 {
3012 int i;
3013 struct ipa_edge_args *args;
3014
3015 if (!ipa_edge_args_vector)
3016 return;
3017
3018 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3019 ipa_free_edge_args_substructures (args);
3020
3021 vec_free (ipa_edge_args_vector);
3022 }
3023
3024 /* Frees all dynamically allocated structures that the param info points
3025 to. */
3026
3027 void
3028 ipa_free_node_params_substructures (struct ipa_node_params *info)
3029 {
3030 info->descriptors.release ();
3031 free (info->lattices);
3032 /* Lattice values and their sources are deallocated with their allocation
3033 pool. */
3034 info->known_vals.release ();
3035 memset (info, 0, sizeof (*info));
3036 }
3037
3038 /* Free all ipa_node_params structures. */
3039
3040 void
3041 ipa_free_all_node_params (void)
3042 {
3043 int i;
3044 struct ipa_node_params *info;
3045
3046 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3047 ipa_free_node_params_substructures (info);
3048
3049 ipa_node_params_vector.release ();
3050 }
3051
3052 /* Set the aggregate replacements of NODE to be AGGVALS. */
3053
3054 void
3055 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3056 struct ipa_agg_replacement_value *aggvals)
3057 {
3058 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3059 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3060
3061 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3062 }
3063
3064 /* Hook that is called by cgraph.c when an edge is removed. */
3065
3066 static void
3067 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3068 {
3069 struct ipa_edge_args *args;
3070
3071 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3072 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3073 return;
3074
3075 args = IPA_EDGE_REF (cs);
3076 if (args->jump_functions)
3077 {
3078 struct ipa_jump_func *jf;
3079 int i;
3080 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3081 {
3082 struct ipa_cst_ref_desc *rdesc;
3083 try_decrement_rdesc_refcount (jf);
3084 if (jf->type == IPA_JF_CONST
3085 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3086 && rdesc->cs == cs)
3087 rdesc->cs = NULL;
3088 }
3089 }
3090
3091 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3092 }
3093
3094 /* Hook that is called by cgraph.c when a node is removed. */
3095
3096 static void
3097 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3098 {
3099 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3100 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3101 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3102 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3103 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3104 }
3105
3106 /* Hook that is called by cgraph.c when an edge is duplicated. */
3107
3108 static void
3109 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3110 __attribute__((unused)) void *data)
3111 {
3112 struct ipa_edge_args *old_args, *new_args;
3113 unsigned int i;
3114
3115 ipa_check_create_edge_args ();
3116
3117 old_args = IPA_EDGE_REF (src);
3118 new_args = IPA_EDGE_REF (dst);
3119
3120 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3121
3122 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3123 {
3124 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3125 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3126
3127 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3128
3129 if (src_jf->type == IPA_JF_CONST)
3130 {
3131 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3132
3133 if (!src_rdesc)
3134 dst_jf->value.constant.rdesc = NULL;
3135 else if (src->caller == dst->caller)
3136 {
3137 struct ipa_ref *ref;
3138 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3139 gcc_checking_assert (n);
3140 ref = ipa_find_reference (src->caller, n,
3141 src->call_stmt, src->lto_stmt_uid);
3142 gcc_checking_assert (ref);
3143 ipa_clone_ref (ref, dst->caller, ref->stmt);
3144
3145 gcc_checking_assert (ipa_refdesc_pool);
3146 struct ipa_cst_ref_desc *dst_rdesc
3147 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3148 dst_rdesc->cs = dst;
3149 dst_rdesc->refcount = src_rdesc->refcount;
3150 dst_rdesc->next_duplicate = NULL;
3151 dst_jf->value.constant.rdesc = dst_rdesc;
3152 }
3153 else if (src_rdesc->cs == src)
3154 {
3155 struct ipa_cst_ref_desc *dst_rdesc;
3156 gcc_checking_assert (ipa_refdesc_pool);
3157 dst_rdesc
3158 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3159 dst_rdesc->cs = dst;
3160 dst_rdesc->refcount = src_rdesc->refcount;
3161 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3162 src_rdesc->next_duplicate = dst_rdesc;
3163 dst_jf->value.constant.rdesc = dst_rdesc;
3164 }
3165 else
3166 {
3167 struct ipa_cst_ref_desc *dst_rdesc;
3168 /* This can happen during inlining, when a JFUNC can refer to a
3169 reference taken in a function up in the tree of inline clones.
3170 We need to find the duplicate that refers to our tree of
3171 inline clones. */
3172
3173 gcc_assert (dst->caller->global.inlined_to);
3174 for (dst_rdesc = src_rdesc->next_duplicate;
3175 dst_rdesc;
3176 dst_rdesc = dst_rdesc->next_duplicate)
3177 {
3178 struct cgraph_node *top;
3179 top = dst_rdesc->cs->caller->global.inlined_to
3180 ? dst_rdesc->cs->caller->global.inlined_to
3181 : dst_rdesc->cs->caller;
3182 if (dst->caller->global.inlined_to == top)
3183 break;
3184 }
3185 gcc_assert (dst_rdesc);
3186 dst_jf->value.constant.rdesc = dst_rdesc;
3187 }
3188 }
3189 }
3190 }
3191
3192 /* Hook that is called by cgraph.c when a node is duplicated. */
3193
3194 static void
3195 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3196 ATTRIBUTE_UNUSED void *data)
3197 {
3198 struct ipa_node_params *old_info, *new_info;
3199 struct ipa_agg_replacement_value *old_av, *new_av;
3200
3201 ipa_check_create_node_params ();
3202 old_info = IPA_NODE_REF (src);
3203 new_info = IPA_NODE_REF (dst);
3204
3205 new_info->descriptors = old_info->descriptors.copy ();
3206 new_info->lattices = NULL;
3207 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3208
3209 new_info->uses_analysis_done = old_info->uses_analysis_done;
3210 new_info->node_enqueued = old_info->node_enqueued;
3211
3212 old_av = ipa_get_agg_replacements_for_node (src);
3213 if (!old_av)
3214 return;
3215
3216 new_av = NULL;
3217 while (old_av)
3218 {
3219 struct ipa_agg_replacement_value *v;
3220
3221 v = ggc_alloc_ipa_agg_replacement_value ();
3222 memcpy (v, old_av, sizeof (*v));
3223 v->next = new_av;
3224 new_av = v;
3225 old_av = old_av->next;
3226 }
3227 ipa_set_node_agg_value_chain (dst, new_av);
3228 }
3229
3230
3231 /* Analyze a function newly added to the callgraph. */
3232
3233 static void
3234 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3235 {
3236 if (cgraph_function_with_gimple_body_p (node))
3237 ipa_analyze_node (node);
3238 }
3239
3240 /* Register our cgraph hooks if they are not already there. */
3241
3242 void
3243 ipa_register_cgraph_hooks (void)
3244 {
3245 if (!edge_removal_hook_holder)
3246 edge_removal_hook_holder =
3247 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3248 if (!node_removal_hook_holder)
3249 node_removal_hook_holder =
3250 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3251 if (!edge_duplication_hook_holder)
3252 edge_duplication_hook_holder =
3253 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3254 if (!node_duplication_hook_holder)
3255 node_duplication_hook_holder =
3256 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3257 function_insertion_hook_holder =
3258 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3259 }
3260
3261 /* Unregister our cgraph hooks if they have been registered. */
3262
3263 static void
3264 ipa_unregister_cgraph_hooks (void)
3265 {
3266 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3267 edge_removal_hook_holder = NULL;
3268 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3269 node_removal_hook_holder = NULL;
3270 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3271 edge_duplication_hook_holder = NULL;
3272 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3273 node_duplication_hook_holder = NULL;
3274 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3275 function_insertion_hook_holder = NULL;
3276 }
3277
3278 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3279 longer needed after ipa-cp. */
3280
3281 void
3282 ipa_free_all_structures_after_ipa_cp (void)
3283 {
3284 if (!optimize)
3285 {
3286 ipa_free_all_edge_args ();
3287 ipa_free_all_node_params ();
3288 free_alloc_pool (ipcp_sources_pool);
3289 free_alloc_pool (ipcp_values_pool);
3290 free_alloc_pool (ipcp_agg_lattice_pool);
3291 ipa_unregister_cgraph_hooks ();
3292 if (ipa_refdesc_pool)
3293 free_alloc_pool (ipa_refdesc_pool);
3294 }
3295 }
3296
3297 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3298 longer needed after indirect inlining. */
3299
3300 void
3301 ipa_free_all_structures_after_iinln (void)
3302 {
3303 ipa_free_all_edge_args ();
3304 ipa_free_all_node_params ();
3305 ipa_unregister_cgraph_hooks ();
3306 if (ipcp_sources_pool)
3307 free_alloc_pool (ipcp_sources_pool);
3308 if (ipcp_values_pool)
3309 free_alloc_pool (ipcp_values_pool);
3310 if (ipcp_agg_lattice_pool)
3311 free_alloc_pool (ipcp_agg_lattice_pool);
3312 if (ipa_refdesc_pool)
3313 free_alloc_pool (ipa_refdesc_pool);
3314 }
3315
3316 /* Print ipa_tree_map data structures of function NODE to the
3317 file F. */
3318
3319 void
3320 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3321 {
3322 int i, count;
3323 struct ipa_node_params *info;
3324
3325 if (!node->definition)
3326 return;
3327 info = IPA_NODE_REF (node);
3328 fprintf (f, " function %s/%i parameter descriptors:\n",
3329 node->name (), node->order);
3330 count = ipa_get_param_count (info);
3331 for (i = 0; i < count; i++)
3332 {
3333 int c;
3334
3335 fprintf (f, " ");
3336 ipa_dump_param (f, info, i);
3337 if (ipa_is_param_used (info, i))
3338 fprintf (f, " used");
3339 c = ipa_get_controlled_uses (info, i);
3340 if (c == IPA_UNDESCRIBED_USE)
3341 fprintf (f, " undescribed_use");
3342 else
3343 fprintf (f, " controlled_uses=%i", c);
3344 fprintf (f, "\n");
3345 }
3346 }
3347
3348 /* Print ipa_tree_map data structures of all functions in the
3349 callgraph to F. */
3350
3351 void
3352 ipa_print_all_params (FILE * f)
3353 {
3354 struct cgraph_node *node;
3355
3356 fprintf (f, "\nFunction parameters:\n");
3357 FOR_EACH_FUNCTION (node)
3358 ipa_print_node_params (f, node);
3359 }
3360
3361 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3362
3363 vec<tree>
3364 ipa_get_vector_of_formal_parms (tree fndecl)
3365 {
3366 vec<tree> args;
3367 int count;
3368 tree parm;
3369
3370 gcc_assert (!flag_wpa);
3371 count = count_formal_params (fndecl);
3372 args.create (count);
3373 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3374 args.quick_push (parm);
3375
3376 return args;
3377 }
3378
3379 /* Return a heap-allocated vector containing the types of formal parameters
3380 of function type FNTYPE. */
3381
3382 vec<tree>
3383 ipa_get_vector_of_formal_parm_types (tree fntype)
3384 {
3385 vec<tree> types;
3386 int count = 0;
3387 tree t;
3388
3389 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3390 count++;
3391
3392 types.create (count);
3393 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3394 types.quick_push (TREE_VALUE (t));
3395
3396 return types;
3397 }
3398
3399 /* Modify the function declaration FNDECL and its type according to the plan in
3400 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3401 to reflect the actual parameters being modified which are determined by the
3402 base_index field. */
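/* Reading the loop below: an entry with op == IPA_PARM_OP_COPY keeps the
   parameter given by base_index as it is, IPA_PARM_OP_REMOVE drops it, and
   any other entry synthesizes a brand new PARM_DECL of adj->type (a pointer
   to it when adj->by_ref is set).  Purely illustratively, the vector
   { COPY 0, REMOVE 1, NEW int } would map

     void foo (int a, struct S s);   to roughly   void foo (int a, int SYNTH.1);
*/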
3403
3404 void
3405 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3406 {
3407 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3408 tree orig_type = TREE_TYPE (fndecl);
3409 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3410
3411 /* The following test is an ugly hack; some functions simply don't have any
3412 arguments in their type. This is probably a bug, but well... */
3413 bool care_for_types = (old_arg_types != NULL_TREE);
3414 bool last_parm_void;
3415 vec<tree> otypes;
3416 if (care_for_types)
3417 {
3418 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3419 == void_type_node);
3420 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3421 if (last_parm_void)
3422 gcc_assert (oparms.length () + 1 == otypes.length ());
3423 else
3424 gcc_assert (oparms.length () == otypes.length ());
3425 }
3426 else
3427 {
3428 last_parm_void = false;
3429 otypes.create (0);
3430 }
3431
3432 int len = adjustments.length ();
3433 tree *link = &DECL_ARGUMENTS (fndecl);
3434 tree new_arg_types = NULL;
3435 for (int i = 0; i < len; i++)
3436 {
3437 struct ipa_parm_adjustment *adj;
3438 gcc_assert (link);
3439
3440 adj = &adjustments[i];
3441 tree parm;
3442 if (adj->op == IPA_PARM_OP_NEW)
3443 parm = NULL;
3444 else
3445 parm = oparms[adj->base_index];
3446 adj->base = parm;
3447
3448 if (adj->op == IPA_PARM_OP_COPY)
3449 {
3450 if (care_for_types)
3451 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3452 new_arg_types);
3453 *link = parm;
3454 link = &DECL_CHAIN (parm);
3455 }
3456 else if (adj->op != IPA_PARM_OP_REMOVE)
3457 {
3458 tree new_parm;
3459 tree ptype;
3460
3461 if (adj->by_ref)
3462 ptype = build_pointer_type (adj->type);
3463 else
3464 {
3465 ptype = adj->type;
3466 if (is_gimple_reg_type (ptype))
3467 {
3468 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3469 if (TYPE_ALIGN (ptype) < malign)
3470 ptype = build_aligned_type (ptype, malign);
3471 }
3472 }
3473
3474 if (care_for_types)
3475 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3476
3477 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3478 ptype);
3479 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3480 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3481 DECL_ARTIFICIAL (new_parm) = 1;
3482 DECL_ARG_TYPE (new_parm) = ptype;
3483 DECL_CONTEXT (new_parm) = fndecl;
3484 TREE_USED (new_parm) = 1;
3485 DECL_IGNORED_P (new_parm) = 1;
3486 layout_decl (new_parm, 0);
3487
3488 if (adj->op == IPA_PARM_OP_NEW)
3489 adj->base = NULL;
3490 else
3491 adj->base = parm;
3492 adj->new_decl = new_parm;
3493
3494 *link = new_parm;
3495 link = &DECL_CHAIN (new_parm);
3496 }
3497 }
3498
3499 *link = NULL_TREE;
3500
3501 tree new_reversed = NULL;
3502 if (care_for_types)
3503 {
3504 new_reversed = nreverse (new_arg_types);
3505 if (last_parm_void)
3506 {
3507 if (new_reversed)
3508 TREE_CHAIN (new_arg_types) = void_list_node;
3509 else
3510 new_reversed = void_list_node;
3511 }
3512 }
3513
3514 /* Use copy_node to preserve as much as possible from the original type
3515 (debug info, attribute lists etc.).
3516 The exception is METHOD_TYPEs, which must have a THIS argument.
3517 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3518 instead. */
3519 tree new_type = NULL;
3520 if (TREE_CODE (orig_type) != METHOD_TYPE
3521 || (adjustments[0].op == IPA_PARM_OP_COPY
3522 && adjustments[0].base_index == 0))
3523 {
3524 new_type = build_distinct_type_copy (orig_type);
3525 TYPE_ARG_TYPES (new_type) = new_reversed;
3526 }
3527 else
3528 {
3529 new_type
3530 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3531 new_reversed));
3532 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3533 DECL_VINDEX (fndecl) = NULL_TREE;
3534 }
3535
3536 /* When signature changes, we need to clear builtin info. */
3537 if (DECL_BUILT_IN (fndecl))
3538 {
3539 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3540 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3541 }
3542
3543 /* This is a new type, not a copy of an old type. Need to reassociate
3544 variants. We can handle everything except the main variant lazily. */
3545 tree t = TYPE_MAIN_VARIANT (orig_type);
3546 if (orig_type != t)
3547 {
3548 TYPE_MAIN_VARIANT (new_type) = t;
3549 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3550 TYPE_NEXT_VARIANT (t) = new_type;
3551 }
3552 else
3553 {
3554 TYPE_MAIN_VARIANT (new_type) = new_type;
3555 TYPE_NEXT_VARIANT (new_type) = NULL;
3556 }
3557
3558 TREE_TYPE (fndecl) = new_type;
3559 DECL_VIRTUAL_P (fndecl) = 0;
3560 otypes.release ();
3561 oparms.release ();
3562 }
3563
3564 /* Modify actual arguments of the function call given by statement STMT as
3565 indicated in ADJUSTMENTS. If this is a directly recursive call, CS must be
3566 NULL. Otherwise it must contain the corresponding call graph edge. */
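/* Purely illustrative: if ADJUSTMENTS replace an aggregate passed by
   reference with a scalar loaded from byte 8 of it, a call such as

     foo (&s);

   is rewritten into something like

     tmp_1 = MEM[(int *)&s + 8B];
     foo.isra.0 (tmp_1);

   where the renamed callee merely stands for whatever clone the
   adjustments belong to.  */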
3567
3568 void
3569 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3570 ipa_parm_adjustment_vec adjustments)
3571 {
3572 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3573 vec<tree> vargs;
3574 vec<tree, va_gc> **debug_args = NULL;
3575 gimple new_stmt;
3576 gimple_stmt_iterator gsi, prev_gsi;
3577 tree callee_decl;
3578 int i, len;
3579
3580 len = adjustments.length ();
3581 vargs.create (len);
3582 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3583 ipa_remove_stmt_references (current_node, stmt);
3584
3585 gsi = gsi_for_stmt (stmt);
3586 prev_gsi = gsi;
3587 gsi_prev (&prev_gsi);
3588 for (i = 0; i < len; i++)
3589 {
3590 struct ipa_parm_adjustment *adj;
3591
3592 adj = &adjustments[i];
3593
3594 if (adj->op == IPA_PARM_OP_COPY)
3595 {
3596 tree arg = gimple_call_arg (stmt, adj->base_index);
3597
3598 vargs.quick_push (arg);
3599 }
3600 else if (adj->op != IPA_PARM_OP_REMOVE)
3601 {
3602 tree expr, base, off;
3603 location_t loc;
3604 unsigned int deref_align = 0;
3605 bool deref_base = false;
3606
3607 /* We create a new parameter out of the value of the old one; we can
3608 do the following kinds of transformations:
3609
3610 - A scalar passed by reference is converted to a scalar passed by
3611 value. (adj->by_ref is false and the type of the original
3612 actual argument is a pointer to a scalar).
3613
3614 - A part of an aggregate is passed instead of the whole aggregate.
3615 The part can be passed either by value or by reference, this is
3616 determined by value of adj->by_ref. Moreover, the code below
3617 handles both situations when the original aggregate is passed by
3618 value (its type is not a pointer) and when it is passed by
3619 reference (it is a pointer to an aggregate).
3620
3621 When the new argument is passed by reference (adj->by_ref is true)
3622 it must be a part of an aggregate and therefore we form it by
3623 simply taking the address of a reference inside the original
3624 aggregate. */
3625
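/* For instance (an illustrative example with made-up names): for a call
   foo (&s), where the callee only uses the integer field at byte offset 4
   of *s, an adjustment with by_ref == false, offset == 32 and an integer
   type makes us load the piece and pass it by value:

     tmp_1 = MEM[(int *) &s + 4B];
     foo.part.0 (tmp_1);

   With by_ref == true the new actual argument would instead be the address
   of that piece, &MEM[(int *) &s + 4B].  */
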
3626 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3627 base = gimple_call_arg (stmt, adj->base_index);
3628 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3629 : EXPR_LOCATION (base);
3630
3631 if (TREE_CODE (base) != ADDR_EXPR
3632 && POINTER_TYPE_P (TREE_TYPE (base)))
3633 off = build_int_cst (adj->alias_ptr_type,
3634 adj->offset / BITS_PER_UNIT);
3635 else
3636 {
3637 HOST_WIDE_INT base_offset;
3638 tree prev_base;
3639 bool addrof;
3640
3641 if (TREE_CODE (base) == ADDR_EXPR)
3642 {
3643 base = TREE_OPERAND (base, 0);
3644 addrof = true;
3645 }
3646 else
3647 addrof = false;
3648 prev_base = base;
3649 base = get_addr_base_and_unit_offset (base, &base_offset);
3650 /* Aggregate arguments can have non-invariant addresses. */
3651 if (!base)
3652 {
3653 base = build_fold_addr_expr (prev_base);
3654 off = build_int_cst (adj->alias_ptr_type,
3655 adj->offset / BITS_PER_UNIT);
3656 }
3657 else if (TREE_CODE (base) == MEM_REF)
3658 {
3659 if (!addrof)
3660 {
3661 deref_base = true;
3662 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3663 }
3664 off = build_int_cst (adj->alias_ptr_type,
3665 base_offset
3666 + adj->offset / BITS_PER_UNIT);
3667 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3668 off);
3669 base = TREE_OPERAND (base, 0);
3670 }
3671 else
3672 {
3673 off = build_int_cst (adj->alias_ptr_type,
3674 base_offset
3675 + adj->offset / BITS_PER_UNIT);
3676 base = build_fold_addr_expr (base);
3677 }
3678 }
3679
3680 if (!adj->by_ref)
3681 {
3682 tree type = adj->type;
3683 unsigned int align;
3684 unsigned HOST_WIDE_INT misalign;
3685
3686 if (deref_base)
3687 {
3688 align = deref_align;
3689 misalign = 0;
3690 }
3691 else
3692 {
3693 get_pointer_alignment_1 (base, &align, &misalign);
3694 if (TYPE_ALIGN (type) > align)
3695 align = TYPE_ALIGN (type);
3696 }
3697 misalign += (tree_to_double_int (off)
3698 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3699 * BITS_PER_UNIT);
3700 misalign = misalign & (align - 1);
3701 if (misalign != 0)
3702 align = (misalign & -misalign);
3703 if (align < TYPE_ALIGN (type))
3704 type = build_aligned_type (type, align);
3705 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3706 }
3707 else
3708 {
3709 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3710 expr = build_fold_addr_expr (expr);
3711 }
3712
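/* A worked example of the alignment computation above (illustrative
   numbers): if BASE is known to be 32-bit aligned with zero misalignment
   and OFF is 2 bytes, MISALIGN becomes 16 bits; masking with ALIGN - 1
   leaves 16, taking the lowest set bit yields ALIGN == 16, and since that
   is below the 32-bit alignment of a 4-byte int, the MEM_REF is built
   with a 16-bit aligned variant of the type.  */
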
3713 expr = force_gimple_operand_gsi (&gsi, expr,
3714 adj->by_ref
3715 || is_gimple_reg_type (adj->type),
3716 NULL, true, GSI_SAME_STMT);
3717 vargs.quick_push (expr);
3718 }
3719 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3720 {
3721 unsigned int ix;
3722 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3723 gimple def_temp;
3724
3725 arg = gimple_call_arg (stmt, adj->base_index);
3726 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3727 {
3728 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3729 continue;
3730 arg = fold_convert_loc (gimple_location (stmt),
3731 TREE_TYPE (origin), arg);
3732 }
3733 if (debug_args == NULL)
3734 debug_args = decl_debug_args_insert (callee_decl);
3735 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3736 if (ddecl == origin)
3737 {
3738 ddecl = (**debug_args)[ix + 1];
3739 break;
3740 }
3741 if (ddecl == NULL)
3742 {
3743 ddecl = make_node (DEBUG_EXPR_DECL);
3744 DECL_ARTIFICIAL (ddecl) = 1;
3745 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3746 DECL_MODE (ddecl) = DECL_MODE (origin);
3747
3748 vec_safe_push (*debug_args, origin);
3749 vec_safe_push (*debug_args, ddecl);
3750 }
3751 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3752 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3753 }
3754 }
3755
3756 if (dump_file && (dump_flags & TDF_DETAILS))
3757 {
3758 fprintf (dump_file, "replacing stmt:");
3759 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3760 }
3761
3762 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3763 vargs.release ();
3764 if (gimple_call_lhs (stmt))
3765 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3766
3767 gimple_set_block (new_stmt, gimple_block (stmt));
3768 if (gimple_has_location (stmt))
3769 gimple_set_location (new_stmt, gimple_location (stmt));
3770 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3771 gimple_call_copy_flags (new_stmt, stmt);
3772
3773 if (dump_file && (dump_flags & TDF_DETAILS))
3774 {
3775 fprintf (dump_file, "with stmt:");
3776 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3777 fprintf (dump_file, "\n");
3778 }
3779 gsi_replace (&gsi, new_stmt, true);
3780 if (cs)
3781 cgraph_set_call_stmt (cs, new_stmt);
3782 do
3783 {
3784 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3785 gsi_prev (&gsi);
3786 }
3787 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3788 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3789
3790 update_ssa (TODO_update_ssa);
3791 free_dominance_info (CDI_DOMINATORS);
3792 }
3793
3794 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3795 so. ADJUSTMENTS is the vector of adjustments. CONVERT specifies whether
3796 the function should care about type incompatibility between the current
3797 and new expressions. If it is false, the function will leave
3798 incompatibility issues to the caller. Return true iff the expression
3799 was modified. */
3800
3801 bool
3802 ipa_modify_expr (tree *expr, bool convert,
3803 ipa_parm_adjustment_vec adjustments)
3804 {
3805 struct ipa_parm_adjustment *cand
3806 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3807 if (!cand)
3808 return false;
3809
3810 tree src;
3811 if (cand->by_ref)
3812 src = build_simple_mem_ref (cand->new_decl);
3813 else
3814 src = cand->new_decl;
3815
3816 if (dump_file && (dump_flags & TDF_DETAILS))
3817 {
3818 fprintf (dump_file, "About to replace expr ");
3819 print_generic_expr (dump_file, *expr, 0);
3820 fprintf (dump_file, " with ");
3821 print_generic_expr (dump_file, src, 0);
3822 fprintf (dump_file, "\n");
3823 }
3824
3825 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3826 {
3827 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3828 *expr = vce;
3829 }
3830 else
3831 *expr = src;
3832 return true;
3833 }
3834
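/* For example (an illustrative sketch): if *EXPR is the load PARM.FLD and
   the matching adjustment replaced that piece of PARM with a new scalar
   parameter, say ISRA_5, then on return *EXPR is ISRA_5, possibly wrapped
   as

     VIEW_CONVERT_EXPR<type of PARM.FLD>(ISRA_5)

   when CONVERT is set and the two types do not usefully convert.  */
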
3835 /* If T is an SSA_NAME, return NULL if it is not a default def, or
3836 its base variable if it is. If IGNORE_DEFAULT_DEF is true, the
3837 base variable is always returned, regardless of whether it is a
3838 default def. Return T if it is not an SSA_NAME. */
3839
3840 static tree
3841 get_ssa_base_param (tree t, bool ignore_default_def)
3842 {
3843 if (TREE_CODE (t) == SSA_NAME)
3844 {
3845 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3846 return SSA_NAME_VAR (t);
3847 else
3848 return NULL_TREE;
3849 }
3850 return t;
3851 }
3852
3853 /* Given an expression, return an adjustment entry specifying the
3854 transformation to be done on EXPR. If no suitable adjustment entry
3855 is found, return NULL.
3856
3857 If IGNORE_DEFAULT_DEF is set, also consider SSA_NAMEs that are not
3858 default defs; otherwise bail out on them.
3859
3860 If CONVERT is non-NULL, this function will set *CONVERT if the
3861 expression provided is a component reference. ADJUSTMENTS is the
3862 adjustments vector. */
3863
3864 ipa_parm_adjustment *
3865 ipa_get_adjustment_candidate (tree **expr, bool *convert,
3866 ipa_parm_adjustment_vec adjustments,
3867 bool ignore_default_def)
3868 {
3869 if (TREE_CODE (**expr) == BIT_FIELD_REF
3870 || TREE_CODE (**expr) == IMAGPART_EXPR
3871 || TREE_CODE (**expr) == REALPART_EXPR)
3872 {
3873 *expr = &TREE_OPERAND (**expr, 0);
3874 if (convert)
3875 *convert = true;
3876 }
3877
3878 HOST_WIDE_INT offset, size, max_size;
3879 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
3880 if (!base || size == -1 || max_size == -1)
3881 return NULL;
3882
3883 if (TREE_CODE (base) == MEM_REF)
3884 {
3885 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
3886 base = TREE_OPERAND (base, 0);
3887 }
3888
3889 base = get_ssa_base_param (base, ignore_default_def);
3890 if (!base || TREE_CODE (base) != PARM_DECL)
3891 return NULL;
3892
3893 struct ipa_parm_adjustment *cand = NULL;
3894 unsigned int len = adjustments.length ();
3895 for (unsigned i = 0; i < len; i++)
3896 {
3897 struct ipa_parm_adjustment *adj = &adjustments[i];
3898
3899 if (adj->base == base
3900 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
3901 {
3902 cand = adj;
3903 break;
3904 }
3905 }
3906
3907 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
3908 return NULL;
3909 return cand;
3910 }
3911
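/* For example (illustrative): for **EXPR of the form PARM.FLD,
   get_ref_base_and_extent yields PARM as the base and the bit offset of
   FLD within it; the loop above then looks for an adjustment with that
   base and offset which is neither a plain copy nor a removal, i.e. one
   that actually replaces the accessed piece with a new parameter.  */
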
3912 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3913
3914 static bool
3915 index_in_adjustments_multiple_times_p (int base_index,
3916 ipa_parm_adjustment_vec adjustments)
3917 {
3918 int i, len = adjustments.length ();
3919 bool one = false;
3920
3921 for (i = 0; i < len; i++)
3922 {
3923 struct ipa_parm_adjustment *adj;
3924 adj = &adjustments[i];
3925
3926 if (adj->base_index == base_index)
3927 {
3928 if (one)
3929 return true;
3930 else
3931 one = true;
3932 }
3933 }
3934 return false;
3935 }
3936
3937
3938 /* Return adjustments that should have the same effect on function parameters
3939 and call arguments as if they were first changed according to adjustments in
3940 INNER and then by adjustments in OUTER. */
3941
3942 ipa_parm_adjustment_vec
3943 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3944 ipa_parm_adjustment_vec outer)
3945 {
3946 int i, outlen = outer.length ();
3947 int inlen = inner.length ();
3948 int removals = 0;
3949 ipa_parm_adjustment_vec adjustments, tmp;
3950
3951 tmp.create (inlen);
3952 for (i = 0; i < inlen; i++)
3953 {
3954 struct ipa_parm_adjustment *n;
3955 n = &inner[i];
3956
3957 if (n->op == IPA_PARM_OP_REMOVE)
3958 removals++;
3959 else
3960 {
3961 /* FIXME: Handling of new arguments is not implemented yet. */
3962 gcc_assert (n->op != IPA_PARM_OP_NEW);
3963 tmp.quick_push (*n);
3964 }
3965 }
3966
3967 adjustments.create (outlen + removals);
3968 for (i = 0; i < outlen; i++)
3969 {
3970 struct ipa_parm_adjustment r;
3971 struct ipa_parm_adjustment *out = &outer[i];
3972 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3973
3974 memset (&r, 0, sizeof (r));
3975 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
3976 if (out->op == IPA_PARM_OP_REMOVE)
3977 {
3978 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3979 {
3980 r.op = IPA_PARM_OP_REMOVE;
3981 adjustments.quick_push (r);
3982 }
3983 continue;
3984 }
3985 else
3986 {
3987 /* FIXME: Handling of new arguments is not implemented yet. */
3988 gcc_assert (out->op != IPA_PARM_OP_NEW);
3989 }
3990
3991 r.base_index = in->base_index;
3992 r.type = out->type;
3993
3994 /* FIXME: Create nonlocal value too. */
3995
3996 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
3997 r.op = IPA_PARM_OP_COPY;
3998 else if (in->op == IPA_PARM_OP_COPY)
3999 r.offset = out->offset;
4000 else if (out->op == IPA_PARM_OP_COPY)
4001 r.offset = in->offset;
4002 else
4003 r.offset = in->offset + out->offset;
4004 adjustments.quick_push (r);
4005 }
4006
4007 for (i = 0; i < inlen; i++)
4008 {
4009 struct ipa_parm_adjustment *n = &inner[i];
4010
4011 if (n->op == IPA_PARM_OP_REMOVE)
4012 adjustments.quick_push (*n);
4013 }
4014
4015 tmp.release ();
4016 return adjustments;
4017 }
4018
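/* A worked example (with made-up indices): let INNER copy parameters 0 and
   2 of three original parameters and remove parameter 1, so the
   intermediate function has two parameters.  If OUTER then removes its
   parameter 0 and copies its parameter 1, the loops above produce a vector
   that removes original parameter 0 (looked up via TMP, the surviving
   INNER entries), copies original parameter 2, and finally re-appends
   INNER's removal of original parameter 1.  */
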
4019 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4020 human-friendly way, assuming they are meant to be applied to FNDECL. */
4021
4022 void
4023 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4024 tree fndecl)
4025 {
4026 int i, len = adjustments.length ();
4027 bool first = true;
4028 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4029
4030 fprintf (file, "IPA param adjustments: ");
4031 for (i = 0; i < len; i++)
4032 {
4033 struct ipa_parm_adjustment *adj;
4034 adj = &adjustments[i];
4035
4036 if (!first)
4037 fprintf (file, " ");
4038 else
4039 first = false;
4040
4041 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4042 print_generic_expr (file, parms[adj->base_index], 0);
4043 if (adj->base)
4044 {
4045 fprintf (file, ", base: ");
4046 print_generic_expr (file, adj->base, 0);
4047 }
4048 if (adj->new_decl)
4049 {
4050 fprintf (file, ", new_decl: ");
4051 print_generic_expr (file, adj->new_decl, 0);
4052 }
4053 if (adj->new_ssa_base)
4054 {
4055 fprintf (file, ", new_ssa_base: ");
4056 print_generic_expr (file, adj->new_ssa_base, 0);
4057 }
4058
4059 if (adj->op == IPA_PARM_OP_COPY)
4060 fprintf (file, ", copy_param");
4061 else if (adj->op == IPA_PARM_OP_REMOVE)
4062 fprintf (file, ", remove_param");
4063 else
4064 fprintf (file, ", offset %li", (long) adj->offset);
4065 if (adj->by_ref)
4066 fprintf (file, ", by_ref");
4067 print_node_brief (file, ", type: ", adj->type, 0);
4068 fprintf (file, "\n");
4069 }
4070 parms.release ();
4071 }
4072
4073 /* Dump the linked list of aggregate replacement values AV to F. */
4074
4075 void
4076 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4077 {
4078 bool comma = false;
4079 fprintf (f, " Aggregate replacements:");
4080 for (; av; av = av->next)
4081 {
4082 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4083 av->index, av->offset);
4084 print_generic_expr (f, av->value, 0);
4085 comma = true;
4086 }
4087 fprintf (f, "\n");
4088 }
4089
4090 /* Stream out jump function JUMP_FUNC to OB. */
4091
4092 static void
4093 ipa_write_jump_function (struct output_block *ob,
4094 struct ipa_jump_func *jump_func)
4095 {
4096 struct ipa_agg_jf_item *item;
4097 struct bitpack_d bp;
4098 int i, count;
4099
4100 streamer_write_uhwi (ob, jump_func->type);
4101 switch (jump_func->type)
4102 {
4103 case IPA_JF_UNKNOWN:
4104 break;
4105 case IPA_JF_KNOWN_TYPE:
4106 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4107 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4108 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4109 break;
4110 case IPA_JF_CONST:
4111 gcc_assert (EXPR_LOCATION (jump_func->value.constant.value)
4112 == UNKNOWN_LOCATION);
4113 stream_write_tree (ob, jump_func->value.constant.value, true);
4114 break;
4115 case IPA_JF_PASS_THROUGH:
4116 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4117 if (jump_func->value.pass_through.operation == NOP_EXPR)
4118 {
4119 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4120 bp = bitpack_create (ob->main_stream);
4121 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4122 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4123 streamer_write_bitpack (&bp);
4124 }
4125 else
4126 {
4127 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4128 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4129 }
4130 break;
4131 case IPA_JF_ANCESTOR:
4132 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4133 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4134 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4135 bp = bitpack_create (ob->main_stream);
4136 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4137 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4138 streamer_write_bitpack (&bp);
4139 break;
4140 }
4141
4142 count = vec_safe_length (jump_func->agg.items);
4143 streamer_write_uhwi (ob, count);
4144 if (count)
4145 {
4146 bp = bitpack_create (ob->main_stream);
4147 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4148 streamer_write_bitpack (&bp);
4149 }
4150
4151 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4152 {
4153 streamer_write_uhwi (ob, item->offset);
4154 stream_write_tree (ob, item->value, true);
4155 }
4156 }
4157
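/* Informally, the record written above, and read back by
   ipa_read_jump_function below, has the layout:

     uhwi     jump function type
     ...      type-specific payload (see the switch above)
     uhwi     number of aggregate items
     bitpack  { by_ref:1 }, present only if the count is nonzero
     count    times { uhwi offset; tree value; }

   The reader must consume exactly the same sequence.  */
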
4158 /* Read in jump function JUMP_FUNC from IB. */
4159
4160 static void
4161 ipa_read_jump_function (struct lto_input_block *ib,
4162 struct ipa_jump_func *jump_func,
4163 struct cgraph_edge *cs,
4164 struct data_in *data_in)
4165 {
4166 enum jump_func_type jftype;
4167 enum tree_code operation;
4168 int i, count;
4169
4170 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4171 switch (jftype)
4172 {
4173 case IPA_JF_UNKNOWN:
4174 jump_func->type = IPA_JF_UNKNOWN;
4175 break;
4176 case IPA_JF_KNOWN_TYPE:
4177 {
4178 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4179 tree base_type = stream_read_tree (ib, data_in);
4180 tree component_type = stream_read_tree (ib, data_in);
4181
4182 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4183 break;
4184 }
4185 case IPA_JF_CONST:
4186 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4187 break;
4188 case IPA_JF_PASS_THROUGH:
4189 operation = (enum tree_code) streamer_read_uhwi (ib);
4190 if (operation == NOP_EXPR)
4191 {
4192 int formal_id = streamer_read_uhwi (ib);
4193 struct bitpack_d bp = streamer_read_bitpack (ib);
4194 bool agg_preserved = bp_unpack_value (&bp, 1);
4195 bool type_preserved = bp_unpack_value (&bp, 1);
4196 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4197 type_preserved);
4198 }
4199 else
4200 {
4201 tree operand = stream_read_tree (ib, data_in);
4202 int formal_id = streamer_read_uhwi (ib);
4203 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4204 operation);
4205 }
4206 break;
4207 case IPA_JF_ANCESTOR:
4208 {
4209 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4210 tree type = stream_read_tree (ib, data_in);
4211 int formal_id = streamer_read_uhwi (ib);
4212 struct bitpack_d bp = streamer_read_bitpack (ib);
4213 bool agg_preserved = bp_unpack_value (&bp, 1);
4214 bool type_preserved = bp_unpack_value (&bp, 1);
4215
4216 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4217 type_preserved);
4218 break;
4219 }
4220 }
4221
4222 count = streamer_read_uhwi (ib);
4223 vec_alloc (jump_func->agg.items, count);
4224 if (count)
4225 {
4226 struct bitpack_d bp = streamer_read_bitpack (ib);
4227 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4228 }
4229 for (i = 0; i < count; i++)
4230 {
4231 struct ipa_agg_jf_item item;
4232 item.offset = streamer_read_uhwi (ib);
4233 item.value = stream_read_tree (ib, data_in);
4234 jump_func->agg.items->quick_push (item);
4235 }
4236 }
4237
4238 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding
4239 to CS that are relevant to indirect inlining. */
4240
4241 static void
4242 ipa_write_indirect_edge_info (struct output_block *ob,
4243 struct cgraph_edge *cs)
4244 {
4245 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4246 struct bitpack_d bp;
4247
4248 streamer_write_hwi (ob, ii->param_index);
4249 streamer_write_hwi (ob, ii->offset);
4250 bp = bitpack_create (ob->main_stream);
4251 bp_pack_value (&bp, ii->polymorphic, 1);
4252 bp_pack_value (&bp, ii->agg_contents, 1);
4253 bp_pack_value (&bp, ii->member_ptr, 1);
4254 bp_pack_value (&bp, ii->by_ref, 1);
4255 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4256 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4257 streamer_write_bitpack (&bp);
4258
4259 if (ii->polymorphic)
4260 {
4261 streamer_write_hwi (ob, ii->otr_token);
4262 stream_write_tree (ob, ii->otr_type, true);
4263 stream_write_tree (ob, ii->outer_type, true);
4264 }
4265 }
4266
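/* The bitpack written above must be unpacked in exactly the same order by
   ipa_read_indirect_edge_info below: polymorphic, agg_contents, member_ptr,
   by_ref, maybe_in_construction, maybe_derived_type.  */
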
4267 /* Read in from IB the parts of cgraph_indirect_call_info corresponding
4268 to CS that are relevant to indirect inlining. */
4269
4270 static void
4271 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4272 struct data_in *data_in ATTRIBUTE_UNUSED,
4273 struct cgraph_edge *cs)
4274 {
4275 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4276 struct bitpack_d bp;
4277
4278 ii->param_index = (int) streamer_read_hwi (ib);
4279 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4280 bp = streamer_read_bitpack (ib);
4281 ii->polymorphic = bp_unpack_value (&bp, 1);
4282 ii->agg_contents = bp_unpack_value (&bp, 1);
4283 ii->member_ptr = bp_unpack_value (&bp, 1);
4284 ii->by_ref = bp_unpack_value (&bp, 1);
4285 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4286 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4287 if (ii->polymorphic)
4288 {
4289 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4290 ii->otr_type = stream_read_tree (ib, data_in);
4291 ii->outer_type = stream_read_tree (ib, data_in);
4292 }
4293 }
4294
4295 /* Stream out NODE info to OB. */
4296
4297 static void
4298 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4299 {
4300 int node_ref;
4301 lto_symtab_encoder_t encoder;
4302 struct ipa_node_params *info = IPA_NODE_REF (node);
4303 int j;
4304 struct cgraph_edge *e;
4305 struct bitpack_d bp;
4306
4307 encoder = ob->decl_state->symtab_node_encoder;
4308 node_ref = lto_symtab_encoder_encode (encoder, node);
4309 streamer_write_uhwi (ob, node_ref);
4310
4311 streamer_write_uhwi (ob, ipa_get_param_count (info));
4312 for (j = 0; j < ipa_get_param_count (info); j++)
4313 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4314 bp = bitpack_create (ob->main_stream);
4315 gcc_assert (info->uses_analysis_done
4316 || ipa_get_param_count (info) == 0);
4317 gcc_assert (!info->node_enqueued);
4318 gcc_assert (!info->ipcp_orig_node);
4319 for (j = 0; j < ipa_get_param_count (info); j++)
4320 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4321 streamer_write_bitpack (&bp);
4322 for (j = 0; j < ipa_get_param_count (info); j++)
4323 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4324 for (e = node->callees; e; e = e->next_callee)
4325 {
4326 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4327
4328 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4329 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4330 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4331 }
4332 for (e = node->indirect_calls; e; e = e->next_callee)
4333 {
4334 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4335
4336 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4337 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4338 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4339 ipa_write_indirect_edge_info (ob, e);
4340 }
4341 }
4342
4343 /* Stream in NODE info from IB. */
4344
4345 static void
4346 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4347 struct data_in *data_in)
4348 {
4349 struct ipa_node_params *info = IPA_NODE_REF (node);
4350 int k;
4351 struct cgraph_edge *e;
4352 struct bitpack_d bp;
4353
4354 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4355
4356 for (k = 0; k < ipa_get_param_count (info); k++)
4357 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4358
4359 bp = streamer_read_bitpack (ib);
4360 if (ipa_get_param_count (info) != 0)
4361 info->uses_analysis_done = true;
4362 info->node_enqueued = false;
4363 for (k = 0; k < ipa_get_param_count (info); k++)
4364 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4365 for (k = 0; k < ipa_get_param_count (info); k++)
4366 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4367 for (e = node->callees; e; e = e->next_callee)
4368 {
4369 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4370 int count = streamer_read_uhwi (ib);
4371
4372 if (!count)
4373 continue;
4374 vec_safe_grow_cleared (args->jump_functions, count);
4375
4376 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4377 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4378 data_in);
4379 }
4380 for (e = node->indirect_calls; e; e = e->next_callee)
4381 {
4382 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4383 int count = streamer_read_uhwi (ib);
4384
4385 if (count)
4386 {
4387 vec_safe_grow_cleared (args->jump_functions, count);
4388 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4389 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4390 data_in);
4391 }
4392 ipa_read_indirect_edge_info (ib, data_in, e);
4393 }
4394 }
4395
4396 /* Write jump functions for the nodes in the current LTO partition. */
4397
4398 void
4399 ipa_prop_write_jump_functions (void)
4400 {
4401 struct cgraph_node *node;
4402 struct output_block *ob;
4403 unsigned int count = 0;
4404 lto_symtab_encoder_iterator lsei;
4405 lto_symtab_encoder_t encoder;
4406
4408 if (!ipa_node_params_vector.exists ())
4409 return;
4410
4411 ob = create_output_block (LTO_section_jump_functions);
4412 encoder = ob->decl_state->symtab_node_encoder;
4413 ob->cgraph_node = NULL;
4414 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4415 lsei_next_function_in_partition (&lsei))
4416 {
4417 node = lsei_cgraph_node (lsei);
4418 if (cgraph_function_with_gimple_body_p (node)
4419 && IPA_NODE_REF (node) != NULL)
4420 count++;
4421 }
4422
4423 streamer_write_uhwi (ob, count);
4424
4425 /* Process all of the functions. */
4426 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4427 lsei_next_function_in_partition (&lsei))
4428 {
4429 node = lsei_cgraph_node (lsei);
4430 if (cgraph_function_with_gimple_body_p (node)
4431 && IPA_NODE_REF (node) != NULL)
4432 ipa_write_node_info (ob, node);
4433 }
4434 streamer_write_char_stream (ob->main_stream, 0);
4435 produce_asm (ob, NULL);
4436 destroy_output_block (ob);
4437 }
4438
4439 /* Read the jump functions section in file FILE_DATA of length LEN with data DATA. */
4440
4441 static void
4442 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4443 size_t len)
4444 {
4445 const struct lto_function_header *header =
4446 (const struct lto_function_header *) data;
4447 const int cfg_offset = sizeof (struct lto_function_header);
4448 const int main_offset = cfg_offset + header->cfg_size;
4449 const int string_offset = main_offset + header->main_size;
4450 struct data_in *data_in;
4451 struct lto_input_block ib_main;
4452 unsigned int i;
4453 unsigned int count;
4454
4455 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4456 header->main_size);
4457
4458 data_in =
4459 lto_data_in_create (file_data, (const char *) data + string_offset,
4460 header->string_size, vNULL);
4461 count = streamer_read_uhwi (&ib_main);
4462
4463 for (i = 0; i < count; i++)
4464 {
4465 unsigned int index;
4466 struct cgraph_node *node;
4467 lto_symtab_encoder_t encoder;
4468
4469 index = streamer_read_uhwi (&ib_main);
4470 encoder = file_data->symtab_node_encoder;
4471 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4472 gcc_assert (node->definition);
4473 ipa_read_node_info (&ib_main, node, data_in);
4474 }
4475 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4476 len);
4477 lto_data_in_delete (data_in);
4478 }
4479
4480 /* Read IPA-CP jump functions. */
4481
4482 void
4483 ipa_prop_read_jump_functions (void)
4484 {
4485 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4486 struct lto_file_decl_data *file_data;
4487 unsigned int j = 0;
4488
4489 ipa_check_create_node_params ();
4490 ipa_check_create_edge_args ();
4491 ipa_register_cgraph_hooks ();
4492
4493 while ((file_data = file_data_vec[j++]))
4494 {
4495 size_t len;
4496 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4497
4498 if (data)
4499 ipa_prop_read_section (file_data, data, len);
4500 }
4501 }
4502
4503 /* After merging units, we can get a mismatch in argument counts.
4504 Also, decl merging might have rendered parameter lists obsolete.
4505 Make sure the node parameter and edge argument vectors exist. */
4506
4507 void
4508 ipa_update_after_lto_read (void)
4509 {
4510 ipa_check_create_node_params ();
4511 ipa_check_create_edge_args ();
4512 }
4513
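/* Stream out the aggregate value replacement chain for NODE to OB.  */
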
4514 void
4515 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4516 {
4517 int node_ref;
4518 unsigned int count = 0;
4519 lto_symtab_encoder_t encoder;
4520 struct ipa_agg_replacement_value *aggvals, *av;
4521
4522 aggvals = ipa_get_agg_replacements_for_node (node);
4523 encoder = ob->decl_state->symtab_node_encoder;
4524 node_ref = lto_symtab_encoder_encode (encoder, node);
4525 streamer_write_uhwi (ob, node_ref);
4526
4527 for (av = aggvals; av; av = av->next)
4528 count++;
4529 streamer_write_uhwi (ob, count);
4530
4531 for (av = aggvals; av; av = av->next)
4532 {
4533 struct bitpack_d bp;
4534
4535 streamer_write_uhwi (ob, av->offset);
4536 streamer_write_uhwi (ob, av->index);
4537 stream_write_tree (ob, av->value, true);
4538
4539 bp = bitpack_create (ob->main_stream);
4540 bp_pack_value (&bp, av->by_ref, 1);
4541 streamer_write_bitpack (&bp);
4542 }
4543 }
4544
4545 /* Stream in the aggregate value replacement chain for NODE from IB. */
4546
4547 static void
4548 read_agg_replacement_chain (struct lto_input_block *ib,
4549 struct cgraph_node *node,
4550 struct data_in *data_in)
4551 {
4552 struct ipa_agg_replacement_value *aggvals = NULL;
4553 unsigned int count, i;
4554
4555 count = streamer_read_uhwi (ib);
4556 for (i = 0; i < count; i++)
4557 {
4558 struct ipa_agg_replacement_value *av;
4559 struct bitpack_d bp;
4560
4561 av = ggc_alloc_ipa_agg_replacement_value ();
4562 av->offset = streamer_read_uhwi (ib);
4563 av->index = streamer_read_uhwi (ib);
4564 av->value = stream_read_tree (ib, data_in);
4565 bp = streamer_read_bitpack (ib);
4566 av->by_ref = bp_unpack_value (&bp, 1);
4567 av->next = aggvals;
4568 aggvals = av;
4569 }
4570 ipa_set_node_agg_value_chain (node, aggvals);
4571 }
4572
4573 /* Write all aggregate replacements for nodes in the set. */
4574
4575 void
4576 ipa_prop_write_all_agg_replacement (void)
4577 {
4578 struct cgraph_node *node;
4579 struct output_block *ob;
4580 unsigned int count = 0;
4581 lto_symtab_encoder_iterator lsei;
4582 lto_symtab_encoder_t encoder;
4583
4584 if (!ipa_node_agg_replacements)
4585 return;
4586
4587 ob = create_output_block (LTO_section_ipcp_transform);
4588 encoder = ob->decl_state->symtab_node_encoder;
4589 ob->cgraph_node = NULL;
4590 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4591 lsei_next_function_in_partition (&lsei))
4592 {
4593 node = lsei_cgraph_node (lsei);
4594 if (cgraph_function_with_gimple_body_p (node)
4595 && ipa_get_agg_replacements_for_node (node) != NULL)
4596 count++;
4597 }
4598
4599 streamer_write_uhwi (ob, count);
4600
4601 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4602 lsei_next_function_in_partition (&lsei))
4603 {
4604 node = lsei_cgraph_node (lsei);
4605 if (cgraph_function_with_gimple_body_p (node)
4606 && ipa_get_agg_replacements_for_node (node) != NULL)
4607 write_agg_replacement_chain (ob, node);
4608 }
4609 streamer_write_char_stream (ob->main_stream, 0);
4610 produce_asm (ob, NULL);
4611 destroy_output_block (ob);
4612 }
4613
4614 /* Read the aggregate replacements section in file FILE_DATA of length LEN
4615 with data DATA. */
4616
4617 static void
4618 read_replacements_section (struct lto_file_decl_data *file_data,
4619 const char *data,
4620 size_t len)
4621 {
4622 const struct lto_function_header *header =
4623 (const struct lto_function_header *) data;
4624 const int cfg_offset = sizeof (struct lto_function_header);
4625 const int main_offset = cfg_offset + header->cfg_size;
4626 const int string_offset = main_offset + header->main_size;
4627 struct data_in *data_in;
4628 struct lto_input_block ib_main;
4629 unsigned int i;
4630 unsigned int count;
4631
4632 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4633 header->main_size);
4634
4635 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4636 header->string_size, vNULL);
4637 count = streamer_read_uhwi (&ib_main);
4638
4639 for (i = 0; i < count; i++)
4640 {
4641 unsigned int index;
4642 struct cgraph_node *node;
4643 lto_symtab_encoder_t encoder;
4644
4645 index = streamer_read_uhwi (&ib_main);
4646 encoder = file_data->symtab_node_encoder;
4647 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4648 gcc_assert (node->definition);
4649 read_agg_replacement_chain (&ib_main, node, data_in);
4650 }
4651 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4652 len);
4653 lto_data_in_delete (data_in);
4654 }
4655
4656 /* Read IPA-CP aggregate replacements. */
4657
4658 void
4659 ipa_prop_read_all_agg_replacement (void)
4660 {
4661 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4662 struct lto_file_decl_data *file_data;
4663 unsigned int j = 0;
4664
4665 while ((file_data = file_data_vec[j++]))
4666 {
4667 size_t len;
4668 const char *data = lto_get_section_data (file_data,
4669 LTO_section_ipcp_transform,
4670 NULL, &len);
4671 if (data)
4672 read_replacements_section (file_data, data, len);
4673 }
4674 }
4675
4676 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4677 NODE. */
4678
4679 static void
4680 adjust_agg_replacement_values (struct cgraph_node *node,
4681 struct ipa_agg_replacement_value *aggval)
4682 {
4683 struct ipa_agg_replacement_value *v;
4684 int i, c = 0, d = 0, *adj;
4685
4686 if (!node->clone.combined_args_to_skip)
4687 return;
4688
4689 for (v = aggval; v; v = v->next)
4690 {
4691 gcc_assert (v->index >= 0);
4692 if (c < v->index)
4693 c = v->index;
4694 }
4695 c++;
4696
4697 adj = XALLOCAVEC (int, c);
4698 for (i = 0; i < c; i++)
4699 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4700 {
4701 adj[i] = -1;
4702 d++;
4703 }
4704 else
4705 adj[i] = i - d;
4706
4707 for (v = aggval; v; v = v->next)
4708 v->index = adj[v->index];
4709 }
4710
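/* A worked example (with made-up indices): if the clone skips original
   parameter 1 of three, the mapping built above is adj = {0, -1, 1}, so a
   replacement for original parameter 2 is renumbered to the clone's
   parameter 1.  A replacement for a skipped parameter would be mapped to
   -1; none is expected to exist.  */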
4711
4712 /* Function body transformation phase. */
4713
4714 unsigned int
4715 ipcp_transform_function (struct cgraph_node *node)
4716 {
4717 vec<ipa_param_descriptor> descriptors = vNULL;
4718 struct param_analysis_info *parms_ainfo;
4719 struct ipa_agg_replacement_value *aggval;
4720 gimple_stmt_iterator gsi;
4721 basic_block bb;
4722 int param_count;
4723 bool cfg_changed = false, something_changed = false;
4724
4725 gcc_checking_assert (cfun);
4726 gcc_checking_assert (current_function_decl);
4727
4728 if (dump_file)
4729 fprintf (dump_file, "Modification phase of node %s/%i\n",
4730 node->name (), node->order);
4731
4732 aggval = ipa_get_agg_replacements_for_node (node);
4733 if (!aggval)
4734 return 0;
4735 param_count = count_formal_params (node->decl);
4736 if (param_count == 0)
4737 return 0;
4738 adjust_agg_replacement_values (node, aggval);
4739 if (dump_file)
4740 ipa_dump_agg_replacement_values (dump_file, aggval);
4741 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4742 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4743 descriptors.safe_grow_cleared (param_count);
4744 ipa_populate_param_decls (node, descriptors);
4745
4746 FOR_EACH_BB_FN (bb, cfun)
4747 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4748 {
4749 struct ipa_agg_replacement_value *v;
4750 gimple stmt = gsi_stmt (gsi);
4751 tree rhs, val, t;
4752 HOST_WIDE_INT offset, size;
4753 int index;
4754 bool by_ref, vce;
4755
4756 if (!gimple_assign_load_p (stmt))
4757 continue;
4758 rhs = gimple_assign_rhs1 (stmt);
4759 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4760 continue;
4761
4762 vce = false;
4763 t = rhs;
4764 while (handled_component_p (t))
4765 {
4766 /* V_C_E can do things like convert an array of integers to one
4767 bigger integer and similar things we do not handle below. */
4768 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4769 {
4770 vce = true;
4771 break;
4772 }
4773 t = TREE_OPERAND (t, 0);
4774 }
4775 if (vce)
4776 continue;
4777
4778 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4779 rhs, &index, &offset, &size, &by_ref))
4780 continue;
4781 for (v = aggval; v; v = v->next)
4782 if (v->index == index
4783 && v->offset == offset)
4784 break;
4785 if (!v
4786 || v->by_ref != by_ref
4787 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4788 continue;
4789
4790 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4791 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4792 {
4793 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4794 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4795 else if (TYPE_SIZE (TREE_TYPE (rhs))
4796 == TYPE_SIZE (TREE_TYPE (v->value)))
4797 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4798 else
4799 {
4800 if (dump_file)
4801 {
4802 fprintf (dump_file, " const ");
4803 print_generic_expr (dump_file, v->value, 0);
4804 fprintf (dump_file, " can't be converted to type of ");
4805 print_generic_expr (dump_file, rhs, 0);
4806 fprintf (dump_file, "\n");
4807 }
4808 continue;
4809 }
4810 }
4811 else
4812 val = v->value;
4813
4814 if (dump_file && (dump_flags & TDF_DETAILS))
4815 {
4816 fprintf (dump_file, "Modifying stmt:\n ");
4817 print_gimple_stmt (dump_file, stmt, 0, 0);
4818 }
4819 gimple_assign_set_rhs_from_tree (&gsi, val);
4820 update_stmt (stmt);
4821
4822 if (dump_file && (dump_flags & TDF_DETAILS))
4823 {
4824 fprintf (dump_file, "into:\n ");
4825 print_gimple_stmt (dump_file, stmt, 0, 0);
4826 fprintf (dump_file, "\n");
4827 }
4828
4829 something_changed = true;
4830 if (maybe_clean_eh_stmt (stmt)
4831 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4832 cfg_changed = true;
4833 }
4834
4835 (*ipa_node_agg_replacements)[node->uid] = NULL;
4836 free_parms_ainfo (parms_ainfo, param_count);
4837 descriptors.release ();
4838
4839 if (!something_changed)
4840 return 0;
4841 else if (cfg_changed)
4842 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4843 else
4844 return TODO_update_ssa_only_virtuals;
4845 }
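
/* For illustration (a made-up example): if the replacement chain for the
   node contains index 0, offset 32, value 42, and the body loads that
   piece of the first parameter,

     x_1 = MEM[(int *) parm_2(D) + 4B];

   the loop above rewrites the load into

     x_1 = 42;

   provided the by_ref flags agree, the sizes match and the constant is
   convertible to the type of the right-hand side.  */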