/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
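
/* For illustration (hypothetical code, not from the sources): a function
   declared as

     int __attribute__ ((optimize ("O0"))) quux (int i) { return i; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with x_optimize equal to zero,
   so the predicate above returns true and IPA-CP leaves the function alone.
   The same happens when IPA-CP itself is disabled among the
   function-specific flags.  */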

/* Return index of the formal whose tree is PTREE in the function described by
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a printable representation of the Ith formal parameter of the function
   corresponding to INFO into FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup (node->name ()), node->order,
	       xstrdup (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
	      && TYPE_BINFO (component_type));
  if (!flag_devirtualize)
    return;
  gcc_assert (BINFO_VTABLE (TYPE_BINFO (component_type)));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set JFUNC to be a copy of another jump function (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  gcc_assert (!type_preserved
	      || (TREE_CODE (type) == RECORD_TYPE
		  && TYPE_BINFO (type)
		  && BINFO_VTABLE (TYPE_BINFO (type))));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
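
/* For illustration (hypothetical types, not from the sources): given

     struct A { virtual void f (); };
     struct B : public A { virtual void f (); };

   a known type jump function describing the A sub-object at offset zero of an
   object known to be of type B has base_type B, offset 0 and component_type
   A; the lookup above then returns the BINFO of the A base found within B's
   own BINFO.  */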

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type associated with the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
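
/* For illustration of the assumptions spelled out above (hypothetical code,
   not from the sources):

     struct A { virtual void f (); };
     struct B : public A
     {
       B () { g (this); }
     };

   B's constructor in gimple first calls A's constructor (section 1), then
   stores the address of B's virtual table into this->_vptr (section 2) and
   only then calls g (section 3).  Walking backwards from the call to g, the
   first statement for which stmt_may_be_vtbl_ptr_store returns true is the
   VMT store from section 2, which determines the dynamic type.  */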

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL_TREE;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return true;

  /* C++ methods are not allowed to change THIS pointer unless they
     are constructors or destructors.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
      && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
      && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
      && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
      && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
	  == DECL_ARGUMENTS (current_function_decl)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
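
/* For illustration (hypothetical): in

     void foo (struct A *a)
     {
       a->f ();
     }

   where A is polymorphic, the walk over aliased vdefs between function entry
   and the call finds no statement that may store to a's virtual table
   pointer, so detect_type_change returns false and any type information
   derived for the argument stays usable.  Had the walk found e.g. a placement
   new constructing a different type at *a, the jump function would be set to
   IPA_JF_UNKNOWN instead.  */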

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
			      gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
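
/* For illustration (hypothetical): in

     void foo (struct S s)
     {
       use (s.i);
       s.i = 7;
       bar (s);
     }

   the load of s.i before the store is preserved, but the aggregate s passed
   to bar is not: the walk over aliased vdefs from bar's virtual operand
   reaches the store to s.i and mark_modified records the modification.  */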

/* If STMT is an assignment that loads a value from a parameter declaration
   and the value is known not to have been modified, return the index of the
   parameter in ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
			    struct param_analysis_info *parms_ainfo,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					: NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
			   gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
		      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
			  struct param_analysis_info *parms_ainfo, gimple stmt,
			  tree op, int *index_p, HOST_WIDE_INT *offset_p,
			  HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					   : NULL, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
				    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
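
/* For illustration (hypothetical): in

     int foo (struct S *p)
     {
       return p->x;
     }

   the load p->x satisfies the conditions above with *INDEX_P set to 0 (the
   parameter p), *OFFSET_P set to the bit offset of x within struct S and
   *BY_REF_P true, provided no statement between the function entry and the
   load may modify *p.  */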

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters as accessed in
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info->descriptors, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
						     call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
					 call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (&parms_ainfo[index],
							   call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct param_analysis_info *parms_ainfo,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset,
			 type_p ? TREE_TYPE (param_type) : NULL, index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, parm), type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
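
/* For illustration: in the Itanium C++ ABI a pointer to member function is a
   record with two fields, roughly (hypothetical rendering)

     struct
     {
       int (T::*__pfn) ();   // method pointer or adjusted vtable index
       ptrdiff_t __delta;    // value to add to the this pointer
     };

   which is the shape type_like_member_ptr_p recognizes: a field of pointer to
   METHOD_TYPE followed by exactly one more, integral, field.  */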

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before the
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg, tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      tree size;
	      check_ref = false;
	      size = build_int_cst (integer_type_node, arg_size);
	      ao_ref_init_from_ptr_and_size (&r, arg_base, size);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size
	  || (lhs_offset < arg_offset
	      && lhs_offset + lhs_size > arg_offset)
	  || (lhs_offset < arg_offset + arg_size
	      && lhs_offset + lhs_size > arg_offset + arg_size))
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      if (lhs_offset + lhs_size < arg_offset
	  || lhs_offset >= (arg_offset + arg_size))
	continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
	{
	  if ((*p)->offset + (*p)->size > lhs_offset)
	    {
	      partial_overlap = true;
	      break;
	    }
	  p = &(*p)->next;
	}
      if (partial_overlap)
	break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
	{
	  if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	    /* We already know this value is subsequently overwritten with
	       something else.  */
	    continue;
	  else
	    /* Otherwise this is a partial overlap which we cannot
	       represent.  */
	    break;
	}

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
	{
	  if (list->constant)
	    {
	      struct ipa_agg_jf_item item;
	      item.offset = list->offset - arg_offset;
	      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	      item.value = unshare_expr_without_location (list->constant);
	      jfunc->agg.items->quick_push (item);
	    }
	  list = list->next;
	}
    }
}
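
/* For illustration (hypothetical): for a caller such as

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk from the call to foo records two constant stores and the
   resulting jump function describes an aggregate passed by reference whose
   items are the values 1 and 2 at the offsets of a and b relative to the
   start of s.  */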

/* Return the type of the formal parameter number I of the callee of edge E,
   or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
							call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 the aggregate passed (because type conversions are ignored in
	 gimple).  Usually we can safely get the type from the function
	 declaration, but in the case of K&R prototypes or variadic functions
	 we can try our luck with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we may do better to work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (call, arg, param_type, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
			    struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
								  NULL);
      /* We do not need to bother analyzing calls to unknown
	 functions unless they may become known during lto/whopr.  */
      if (!callee->definition && !flag_lto)
	continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
	   : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
1780
1781 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1782 call to parameter number PARAM_INDEX. NODE is the caller. Return the
1783 indirect call graph edge. */
1784
1785 static struct cgraph_edge *
1786 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1787 {
1788 struct cgraph_edge *cs;
1789
1790 cs = cgraph_edge (node, stmt);
1791 cs->indirect_info->param_index = param_index;
1792 cs->indirect_info->agg_contents = 0;
1793 cs->indirect_info->member_ptr = 0;
1794 return cs;
1795 }
1796
1797 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1798 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1799 intermediate information about each formal parameter. Currently it checks
1800 whether the call calls a pointer that is a formal parameter and if so, the
1801 parameter is marked with the called flag and an indirect call graph edge
1802 describing the call is created. This is very simple for ordinary pointers
1803 represented in SSA but not-so-nice when it comes to member pointers. The
1804 ugly part of this function does nothing more than trying to match the
1805 pattern of such a call. An example of such a pattern is the gimple dump
1806 below, the call is on the last line:
1807
1808 <bb 2>:
1809 f$__delta_5 = f.__delta;
1810 f$__pfn_24 = f.__pfn;
1811
1812 or
1813 <bb 2>:
1814 f$__delta_5 = MEM[(struct *)&f];
1815 f$__pfn_24 = MEM[(struct *)&f + 4B];
1816
1817 and a few lines below:
1818
1819 <bb 5>
1820 D.2496_3 = (int) f$__pfn_24;
1821 D.2497_4 = D.2496_3 & 1;
1822 if (D.2497_4 != 0)
1823 goto <bb 3>;
1824 else
1825 goto <bb 4>;
1826
1827 <bb 6>:
1828 D.2500_7 = (unsigned int) f$__delta_5;
1829 D.2501_8 = &S + D.2500_7;
1830 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1831 D.2503_10 = *D.2502_9;
1832 D.2504_12 = f$__pfn_24 + -1;
1833 D.2505_13 = (unsigned int) D.2504_12;
1834 D.2506_14 = D.2503_10 + D.2505_13;
1835 D.2507_15 = *D.2506_14;
1836 iftmp.11_16 = (String:: *) D.2507_15;
1837
1838 <bb 7>:
1839 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1840 D.2500_19 = (unsigned int) f$__delta_5;
1841 D.2508_20 = &S + D.2500_19;
1842 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1843
1844 Such patterns are results of simple calls to a member pointer:
1845
1846 int doprinting (int (MyString::* f)(int) const)
1847 {
1848 MyString S ("somestring");
1849
1850 return (S.*f)(4);
1851 }
1852
1853 Moreover, the function looks for called pointers loaded from aggregates
1854 passed by value or by reference. */
1855
1856 static void
1857 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1858 struct ipa_node_params *info,
1859 struct param_analysis_info *parms_ainfo,
1860 gimple call, tree target)
1861 {
1862 gimple def;
1863 tree n1, n2;
1864 gimple d1, d2;
1865 tree rec, rec2, cond;
1866 gimple branch;
1867 int index;
1868 basic_block bb, virt_bb, join;
1869 HOST_WIDE_INT offset;
1870 bool by_ref;
1871
1872 if (SSA_NAME_IS_DEFAULT_DEF (target))
1873 {
1874 tree var = SSA_NAME_VAR (target);
1875 index = ipa_get_param_decl_index (info, var);
1876 if (index >= 0)
1877 ipa_note_param_call (node, index, call);
1878 return;
1879 }
1880
1881 def = SSA_NAME_DEF_STMT (target);
1882 if (gimple_assign_single_p (def)
1883 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1884 gimple_assign_rhs1 (def), &index, &offset,
1885 NULL, &by_ref))
1886 {
1887 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1888 if (cs->indirect_info->offset != offset)
1889 cs->indirect_info->outer_type = NULL;
1890 cs->indirect_info->offset = offset;
1891 cs->indirect_info->agg_contents = 1;
1892 cs->indirect_info->by_ref = by_ref;
1893 return;
1894 }
1895
1896 /* Now we need to try to match the complex pattern of calling a member
1897 pointer. */
1898 if (gimple_code (def) != GIMPLE_PHI
1899 || gimple_phi_num_args (def) != 2
1900 || !POINTER_TYPE_P (TREE_TYPE (target))
1901 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1902 return;
1903
1904 /* First, we need to check whether one of these is a load from a member
1905 pointer that is a parameter to this function. */
1906 n1 = PHI_ARG_DEF (def, 0);
1907 n2 = PHI_ARG_DEF (def, 1);
1908 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1909 return;
1910 d1 = SSA_NAME_DEF_STMT (n1);
1911 d2 = SSA_NAME_DEF_STMT (n2);
1912
1913 join = gimple_bb (def);
1914 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1915 {
1916 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1917 return;
1918
1919 bb = EDGE_PRED (join, 0)->src;
1920 virt_bb = gimple_bb (d2);
1921 }
1922 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1923 {
1924 bb = EDGE_PRED (join, 1)->src;
1925 virt_bb = gimple_bb (d1);
1926 }
1927 else
1928 return;
1929
1930 /* Second, we need to check that the basic blocks are laid out in the way
1931 corresponding to the pattern. */
1932
1933 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1934 || single_pred (virt_bb) != bb
1935 || single_succ (virt_bb) != join)
1936 return;
1937
1938 /* Third, let's see that the branching is done depending on the least
1939 significant bit of the pfn. */
1940
1941 branch = last_stmt (bb);
1942 if (!branch || gimple_code (branch) != GIMPLE_COND)
1943 return;
1944
1945 if ((gimple_cond_code (branch) != NE_EXPR
1946 && gimple_cond_code (branch) != EQ_EXPR)
1947 || !integer_zerop (gimple_cond_rhs (branch)))
1948 return;
1949
1950 cond = gimple_cond_lhs (branch);
1951 if (!ipa_is_ssa_with_stmt_def (cond))
1952 return;
1953
1954 def = SSA_NAME_DEF_STMT (cond);
1955 if (!is_gimple_assign (def)
1956 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1957 || !integer_onep (gimple_assign_rhs2 (def)))
1958 return;
1959
1960 cond = gimple_assign_rhs1 (def);
1961 if (!ipa_is_ssa_with_stmt_def (cond))
1962 return;
1963
1964 def = SSA_NAME_DEF_STMT (cond);
1965
1966 if (is_gimple_assign (def)
1967 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1968 {
1969 cond = gimple_assign_rhs1 (def);
1970 if (!ipa_is_ssa_with_stmt_def (cond))
1971 return;
1972 def = SSA_NAME_DEF_STMT (cond);
1973 }
1974
1975 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1976 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1977 == ptrmemfunc_vbit_in_delta),
1978 NULL);
1979 if (rec != rec2)
1980 return;
1981
1982 index = ipa_get_param_decl_index (info, rec);
1983 if (index >= 0
1984 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1985 {
1986 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1987 if (cs->indirect_info->offset != offset)
1988 cs->indirect_info->outer_type = NULL;
1989 cs->indirect_info->offset = offset;
1990 cs->indirect_info->agg_contents = 1;
1991 cs->indirect_info->member_ptr = 1;
1992 }
1993
1994 return;
1995 }
1996
1997 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1998 object referenced in the expression is a formal parameter of the caller
1999 (described by INFO), create a call note for the statement. */
2000
2001 static void
2002 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
2003 struct ipa_node_params *info, gimple call,
2004 tree target)
2005 {
2006 struct cgraph_edge *cs;
2007 struct cgraph_indirect_call_info *ii;
2008 struct ipa_jump_func jfunc;
2009 tree obj = OBJ_TYPE_REF_OBJECT (target);
2010 int index;
2011 HOST_WIDE_INT anc_offset;
2012
2013 if (!flag_devirtualize)
2014 return;
2015
2016 if (TREE_CODE (obj) != SSA_NAME)
2017 return;
2018
2019 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2020 {
2021 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2022 return;
2023
2024 anc_offset = 0;
2025 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2026 gcc_assert (index >= 0);
2027 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2028 call, &jfunc))
2029 return;
2030 }
2031 else
2032 {
2033 gimple stmt = SSA_NAME_DEF_STMT (obj);
2034 tree expr;
2035
2036 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2037 if (!expr)
2038 return;
2039 index = ipa_get_param_decl_index (info,
2040 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2041 gcc_assert (index >= 0);
2042 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2043 call, &jfunc, anc_offset))
2044 return;
2045 }
2046
2047 cs = ipa_note_param_call (node, index, call);
2048 ii = cs->indirect_info;
2049 ii->offset = anc_offset;
2050 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2051 ii->otr_type = obj_type_ref_class (target);
2052 ii->polymorphic = 1;
2053 }
2054
2055 /* Analyze the call statement CALL to see whether and how it utilizes formal
2056 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a
2057 vector containing intermediate information about each formal parameter. */
2058
2059 static void
2060 ipa_analyze_call_uses (struct cgraph_node *node,
2061 struct ipa_node_params *info,
2062 struct param_analysis_info *parms_ainfo, gimple call)
2063 {
2064 tree target = gimple_call_fn (call);
2065 struct cgraph_edge *cs;
2066
2067 if (!target
2068 || (TREE_CODE (target) != SSA_NAME
2069 && !virtual_method_call_p (target)))
2070 return;
2071
2072 /* If we previously turned the call into a direct call, there is
2073 no need to analyze it again. */
2074 cs = cgraph_edge (node, call);
2075 if (cs && !cs->indirect_unknown_callee)
2076 return;
2077 if (TREE_CODE (target) == SSA_NAME)
2078 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2079 else if (virtual_method_call_p (target))
2080 ipa_analyze_virtual_call_uses (node, info, call, target);
2081 }
2082
2083
2084 /* Analyze the statement STMT with respect to formal parameters (described
2085 in INFO) of the caller given by NODE. Currently it only checks whether
2086 formal parameters are called. PARMS_AINFO is a pointer to a vector
2087 containing intermediate information about each formal parameter. */
2088
2089 static void
2090 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2091 struct param_analysis_info *parms_ainfo, gimple stmt)
2092 {
2093 if (is_gimple_call (stmt))
2094 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2095 }
2096
2097 /* Callback of walk_stmt_load_store_addr_ops, used below for loads, stores
2098 and taken addresses alike. If OP is a parameter declaration, mark it as
2099 used in the info structure passed in DATA. */
2100
2101 static bool
2102 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2103 {
2104 struct ipa_node_params *info = (struct ipa_node_params *) data;
2105
2106 op = get_base_address (op);
2107 if (op
2108 && TREE_CODE (op) == PARM_DECL)
2109 {
2110 int index = ipa_get_param_decl_index (info, op);
2111 gcc_assert (index >= 0);
2112 ipa_set_param_used (info, index, true);
2113 }
2114
2115 return false;
2116 }
2117
2118 /* Scan the function body of NODE and inspect the uses of formal parameters.
2119 Store the findings in various fields of the associated ipa_node_params
2120 structure, such as the parameter flags, notes etc. PARMS_AINFO is a pointer
2121 to a vector containing intermediate information about each formal parameter. */
2122
2123 static void
2124 ipa_analyze_params_uses (struct cgraph_node *node,
2125 struct param_analysis_info *parms_ainfo)
2126 {
2127 tree decl = node->decl;
2128 basic_block bb;
2129 struct function *func;
2130 gimple_stmt_iterator gsi;
2131 struct ipa_node_params *info = IPA_NODE_REF (node);
2132 int i;
2133
2134 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2135 return;
2136
2137 info->uses_analysis_done = 1;
2138 if (ipa_func_spec_opts_forbid_analysis_p (node))
2139 {
2140 for (i = 0; i < ipa_get_param_count (info); i++)
2141 {
2142 ipa_set_param_used (info, i, true);
2143 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2144 }
2145 return;
2146 }
2147
2148 for (i = 0; i < ipa_get_param_count (info); i++)
2149 {
2150 tree parm = ipa_get_param (info, i);
2151 int controlled_uses = 0;
2152
2153 /* For SSA regs see if parameter is used. For non-SSA we compute
2154 the flag during modification analysis. */
2155 if (is_gimple_reg (parm))
2156 {
2157 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2158 parm);
2159 if (ddef && !has_zero_uses (ddef))
2160 {
2161 imm_use_iterator imm_iter;
2162 use_operand_p use_p;
2163
2164 ipa_set_param_used (info, i, true);
2165 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2166 if (!is_gimple_call (USE_STMT (use_p)))
2167 {
2168 if (!is_gimple_debug (USE_STMT (use_p)))
2169 {
2170 controlled_uses = IPA_UNDESCRIBED_USE;
2171 break;
2172 }
2173 }
2174 else
2175 controlled_uses++;
2176 }
2177 else
2178 controlled_uses = 0;
2179 }
2180 else
2181 controlled_uses = IPA_UNDESCRIBED_USE;
2182 ipa_set_controlled_uses (info, i, controlled_uses);
2183 }
2184
2185 func = DECL_STRUCT_FUNCTION (decl);
2186 FOR_EACH_BB_FN (bb, func)
2187 {
2188 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2189 {
2190 gimple stmt = gsi_stmt (gsi);
2191
2192 if (is_gimple_debug (stmt))
2193 continue;
2194
2195 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2196 walk_stmt_load_store_addr_ops (stmt, info,
2197 visit_ref_for_mod_analysis,
2198 visit_ref_for_mod_analysis,
2199 visit_ref_for_mod_analysis);
2200 }
2201 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2202 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2203 visit_ref_for_mod_analysis,
2204 visit_ref_for_mod_analysis,
2205 visit_ref_for_mod_analysis);
2206 }
2207 }
2208
2209 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2210
2211 static void
2212 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2213 {
2214 int i;
2215
2216 for (i = 0; i < param_count; i++)
2217 {
2218 if (parms_ainfo[i].parm_visited_statements)
2219 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2220 if (parms_ainfo[i].pt_visited_statements)
2221 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2222 }
2223 }
2224
2225 /* Initialize the array describing properties of formal parameters
2226 of NODE, analyze their uses and compute jump functions associated
2227 with actual arguments of calls from within NODE. */
2228
2229 void
2230 ipa_analyze_node (struct cgraph_node *node)
2231 {
2232 struct ipa_node_params *info;
2233 struct param_analysis_info *parms_ainfo;
2234 int param_count;
2235
2236 ipa_check_create_node_params ();
2237 ipa_check_create_edge_args ();
2238 info = IPA_NODE_REF (node);
2239 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2240 ipa_initialize_node_params (node);
2241
2242 param_count = ipa_get_param_count (info);
2243 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2244 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2245
2246 ipa_analyze_params_uses (node, parms_ainfo);
2247 ipa_compute_jump_functions (node, parms_ainfo);
2248
2249 free_parms_ainfo (parms_ainfo, param_count);
2250 pop_cfun ();
2251 }
2252
2253 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2254 attempt a type-based devirtualization. If successful, return the
2255 target function declaration, otherwise return NULL. */
2256
2257 tree
2258 ipa_intraprocedural_devirtualization (gimple call)
2259 {
2260 tree binfo, token, fndecl;
2261 struct ipa_jump_func jfunc;
2262 tree otr = gimple_call_fn (call);
2263
2264 jfunc.type = IPA_JF_UNKNOWN;
2265 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2266 call, obj_type_ref_class (otr));
2267 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2268 return NULL_TREE;
2269 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2270 if (!binfo)
2271 return NULL_TREE;
2272 token = OBJ_TYPE_REF_TOKEN (otr);
2273 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2274 binfo);
2275 #ifdef ENABLE_CHECKING
2276 if (fndecl)
2277 gcc_assert (possible_polymorphic_call_target_p
2278 (otr, cgraph_get_node (fndecl)));
2279 #endif
2280 return fndecl;
2281 }
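
/* As a hypothetical illustration of a case the above can resolve:

     struct A { virtual int f (); };
     int g (void) { A a; return a.f (); }

   Within g the object is known to be of exactly type A, so the OBJ_TYPE_REF
   call can be devirtualized to A::f without any interprocedural
   information. */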
2282
2283 /* Update the jump function DST when the call graph edge corresponding to SRC
2284 is being inlined, knowing that DST is of type ancestor and SRC of known
2285 type. */
2286
2287 static void
2288 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2289 struct ipa_jump_func *dst)
2290 {
2291 HOST_WIDE_INT combined_offset;
2292 tree combined_type;
2293
2294 if (!ipa_get_jf_ancestor_type_preserved (dst))
2295 {
2296 dst->type = IPA_JF_UNKNOWN;
2297 return;
2298 }
2299
2300 combined_offset = ipa_get_jf_known_type_offset (src)
2301 + ipa_get_jf_ancestor_offset (dst);
2302 combined_type = ipa_get_jf_ancestor_type (dst);
2303
2304 ipa_set_jf_known_type (dst, combined_offset,
2305 ipa_get_jf_known_type_base_type (src),
2306 combined_type);
2307 }
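
/* A purely illustrative instance of the combination above: if SRC says the
   argument points to an object known to be of type D starting at offset 0,
   and DST describes walking to an ancestor (base) B located 64 bits into it,
   the result is a known-type jump function for component type B at offset
   0 + 64 within the same object, with the base type taken from SRC. */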
2308
2309 /* Update the jump functions associated with call graph edge E when the call
2310 graph edge CS is being inlined, assuming that E->caller is already (possibly
2311 indirectly) inlined into CS->callee and that E has not been inlined. */
2312
2313 static void
2314 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2315 struct cgraph_edge *e)
2316 {
2317 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2318 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2319 int count = ipa_get_cs_argument_count (args);
2320 int i;
2321
2322 for (i = 0; i < count; i++)
2323 {
2324 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2325
2326 if (dst->type == IPA_JF_ANCESTOR)
2327 {
2328 struct ipa_jump_func *src;
2329 int dst_fid = dst->value.ancestor.formal_id;
2330
2331 /* Variable number of arguments can cause havoc if we try to access
2332 one that does not exist in the inlined edge. So make sure we
2333 don't. */
2334 if (dst_fid >= ipa_get_cs_argument_count (top))
2335 {
2336 dst->type = IPA_JF_UNKNOWN;
2337 continue;
2338 }
2339
2340 src = ipa_get_ith_jump_func (top, dst_fid);
2341
2342 if (src->agg.items
2343 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2344 {
2345 struct ipa_agg_jf_item *item;
2346 int j;
2347
2348 /* Currently we do not produce clobber aggregate jump functions,
2349 replace with merging when we do. */
2350 gcc_assert (!dst->agg.items);
2351
2352 dst->agg.items = vec_safe_copy (src->agg.items);
2353 dst->agg.by_ref = src->agg.by_ref;
2354 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2355 item->offset -= dst->value.ancestor.offset;
2356 }
2357
2358 if (src->type == IPA_JF_KNOWN_TYPE)
2359 combine_known_type_and_ancestor_jfs (src, dst);
2360 else if (src->type == IPA_JF_PASS_THROUGH
2361 && src->value.pass_through.operation == NOP_EXPR)
2362 {
2363 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2364 dst->value.ancestor.agg_preserved &=
2365 src->value.pass_through.agg_preserved;
2366 dst->value.ancestor.type_preserved &=
2367 src->value.pass_through.type_preserved;
2368 }
2369 else if (src->type == IPA_JF_ANCESTOR)
2370 {
2371 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2372 dst->value.ancestor.offset += src->value.ancestor.offset;
2373 dst->value.ancestor.agg_preserved &=
2374 src->value.ancestor.agg_preserved;
2375 dst->value.ancestor.type_preserved &=
2376 src->value.ancestor.type_preserved;
2377 }
2378 else
2379 dst->type = IPA_JF_UNKNOWN;
2380 }
2381 else if (dst->type == IPA_JF_PASS_THROUGH)
2382 {
2383 struct ipa_jump_func *src;
2384 /* We must check the range due to calls with a variable number of
2385 arguments, and we cannot combine jump functions with operations. */
2386 if (dst->value.pass_through.operation == NOP_EXPR
2387 && (dst->value.pass_through.formal_id
2388 < ipa_get_cs_argument_count (top)))
2389 {
2390 int dst_fid = dst->value.pass_through.formal_id;
2391 src = ipa_get_ith_jump_func (top, dst_fid);
2392 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2393
2394 switch (src->type)
2395 {
2396 case IPA_JF_UNKNOWN:
2397 dst->type = IPA_JF_UNKNOWN;
2398 break;
2399 case IPA_JF_KNOWN_TYPE:
2400 if (ipa_get_jf_pass_through_type_preserved (dst))
2401 ipa_set_jf_known_type (dst,
2402 ipa_get_jf_known_type_offset (src),
2403 ipa_get_jf_known_type_base_type (src),
2404 ipa_get_jf_known_type_component_type (src));
2405 else
2406 dst->type = IPA_JF_UNKNOWN;
2407 break;
2408 case IPA_JF_CONST:
2409 ipa_set_jf_cst_copy (dst, src);
2410 break;
2411
2412 case IPA_JF_PASS_THROUGH:
2413 {
2414 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2415 enum tree_code operation;
2416 operation = ipa_get_jf_pass_through_operation (src);
2417
2418 if (operation == NOP_EXPR)
2419 {
2420 bool agg_p, type_p;
2421 agg_p = dst_agg_p
2422 && ipa_get_jf_pass_through_agg_preserved (src);
2423 type_p = ipa_get_jf_pass_through_type_preserved (src)
2424 && ipa_get_jf_pass_through_type_preserved (dst);
2425 ipa_set_jf_simple_pass_through (dst, formal_id,
2426 agg_p, type_p);
2427 }
2428 else
2429 {
2430 tree operand = ipa_get_jf_pass_through_operand (src);
2431 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2432 operation);
2433 }
2434 break;
2435 }
2436 case IPA_JF_ANCESTOR:
2437 {
2438 bool agg_p, type_p;
2439 agg_p = dst_agg_p
2440 && ipa_get_jf_ancestor_agg_preserved (src);
2441 type_p = ipa_get_jf_ancestor_type_preserved (src)
2442 && ipa_get_jf_pass_through_type_preserved (dst);
2443 ipa_set_ancestor_jf (dst,
2444 ipa_get_jf_ancestor_offset (src),
2445 ipa_get_jf_ancestor_type (src),
2446 ipa_get_jf_ancestor_formal_id (src),
2447 agg_p, type_p);
2448 break;
2449 }
2450 default:
2451 gcc_unreachable ();
2452 }
2453
2454 if (src->agg.items
2455 && (dst_agg_p || !src->agg.by_ref))
2456 {
2457 /* Currently we do not produce clobber aggregate jump
2458 functions, replace with merging when we do. */
2459 gcc_assert (!dst->agg.items);
2460
2461 dst->agg.by_ref = src->agg.by_ref;
2462 dst->agg.items = vec_safe_copy (src->agg.items);
2463 }
2464 }
2465 else
2466 dst->type = IPA_JF_UNKNOWN;
2467 }
2468 }
2469 }
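
/* A hypothetical illustration of the pass-through composition above:

     static int h (int x) { ... }
     static int f (int y) { return h (y); }  - jfunc for h's arg: pass-through
     int g (int p) { return f (p); }         - jfunc for f's arg: pass-through

   When f is inlined into g, the argument of the resulting g -> h edge is
   re-described by combining the two simple pass-throughs into a single
   pass-through of g's parameter p. */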
2470
2471 /* If TARGET is an addr_expr of a function declaration, make it the destination
2472 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2473
2474 struct cgraph_edge *
2475 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2476 {
2477 struct cgraph_node *callee;
2478 struct inline_edge_summary *es = inline_edge_summary (ie);
2479 bool unreachable = false;
2480
2481 if (TREE_CODE (target) == ADDR_EXPR)
2482 target = TREE_OPERAND (target, 0);
2483 if (TREE_CODE (target) != FUNCTION_DECL)
2484 {
2485 target = canonicalize_constructor_val (target, NULL);
2486 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2487 {
2488 if (ie->indirect_info->member_ptr)
2489 /* Member pointer call that goes through a VMT lookup. */
2490 return NULL;
2491
2492 if (dump_file)
2493 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2494 " in %s/%i, making it unreachable.\n",
2495 ie->caller->name (), ie->caller->order);
2496 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2497 callee = cgraph_get_create_node (target);
2498 unreachable = true;
2499 }
2500 else
2501 callee = cgraph_get_node (target);
2502 }
2503 else
2504 callee = cgraph_get_node (target);
2505
2506 /* Because may-edges are not explicitly represented and the vtable may be
2507 external, we may create the first reference to the object in the unit. */
2508 if (!callee || callee->global.inlined_to)
2509 {
2510
2511 /* We had better make sure we can refer to it.
2512 In the case of static functions we are out of luck, since we have already
2513 removed their bodies. In the case of public functions we may or may
2514 not introduce the reference. */
2515 if (!canonicalize_constructor_val (target, NULL)
2516 || !TREE_PUBLIC (target))
2517 {
2518 if (dump_file)
2519 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2520 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2521 xstrdup (ie->caller->name ()),
2522 ie->caller->order,
2523 xstrdup (ie->callee->name ()),
2524 ie->callee->order);
2525 return NULL;
2526 }
2527 callee = cgraph_get_create_node (target);
2528 }
2529 ipa_check_create_node_params ();
2530
2531 /* We cannot make edges to inline clones. It is a bug that someone removed
2532 the cgraph node too early. */
2533 gcc_assert (!callee->global.inlined_to);
2534
2535 if (dump_file && !unreachable)
2536 {
2537 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2538 "(%s/%i -> %s/%i), for stmt ",
2539 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2540 xstrdup (ie->caller->name ()),
2541 ie->caller->order,
2542 xstrdup (callee->name ()),
2543 callee->order);
2544 if (ie->call_stmt)
2545 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2546 else
2547 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2548 }
2549 ie = cgraph_make_edge_direct (ie, callee);
2550 es = inline_edge_summary (ie);
2551 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2552 - eni_size_weights.call_cost);
2553 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2554 - eni_time_weights.call_cost);
2555
2556 return ie;
2557 }
2558
2559 /* Retrieve a value from the aggregate jump function AGG for the given OFFSET,
2560 or return NULL if there is none. BY_REF specifies whether the value has to
2561 be passed by reference or by value. */
2562
2563 tree
2564 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2565 HOST_WIDE_INT offset, bool by_ref)
2566 {
2567 struct ipa_agg_jf_item *item;
2568 int i;
2569
2570 if (by_ref != agg->by_ref)
2571 return NULL;
2572
2573 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2574 if (item->offset == offset)
2575 {
2576 /* Currently we do not have clobber values, return NULL for them once
2577 we do. */
2578 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2579 return item->value;
2580 }
2581 return NULL;
2582 }
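
/* For example (hypothetical names), given

     struct S { int i; void (*cb) (void); };
     void caller (void) { struct S s; s.i = 0; s.cb = impl; callee (&s); }

   the aggregate jump function of the argument of callee can record the
   address of impl at the bit offset of the cb field, and a lookup at that
   offset with BY_REF true yields that constant. */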
2583
2584 /* Remove a reference to SYMBOL from the list of references of a node given by
2585 reference description RDESC. Return true if the reference has been
2586 successfully found and removed. */
2587
2588 static bool
2589 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2590 {
2591 struct ipa_ref *to_del;
2592 struct cgraph_edge *origin;
2593
2594 origin = rdesc->cs;
2595 if (!origin)
2596 return false;
2597 to_del = ipa_find_reference (origin->caller, symbol,
2598 origin->call_stmt, origin->lto_stmt_uid);
2599 if (!to_del)
2600 return false;
2601
2602 ipa_remove_reference (to_del);
2603 if (dump_file)
2604 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2605 xstrdup (origin->caller->name ()),
2606 origin->caller->order, xstrdup (symbol->name ()));
2607 return true;
2608 }
2609
2610 /* If JFUNC has a reference description with refcount different from
2611 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2612 NULL. JFUNC must be a constant jump function. */
2613
2614 static struct ipa_cst_ref_desc *
2615 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2616 {
2617 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2618 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2619 return rdesc;
2620 else
2621 return NULL;
2622 }
2623
2624 /* If the value of constant jump function JFUNC is an address of a function
2625 declaration, return the associated call graph node. Otherwise return
2626 NULL. */
2627
2628 static cgraph_node *
2629 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2630 {
2631 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2632 tree cst = ipa_get_jf_constant (jfunc);
2633 if (TREE_CODE (cst) != ADDR_EXPR
2634 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2635 return NULL;
2636
2637 return cgraph_get_node (TREE_OPERAND (cst, 0));
2638 }
2639
2640
2641 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2642 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2643 the edge specified in the rdesc. Return false if either the symbol or the
2644 reference could not be found, otherwise return true. */
2645
2646 static bool
2647 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2648 {
2649 struct ipa_cst_ref_desc *rdesc;
2650 if (jfunc->type == IPA_JF_CONST
2651 && (rdesc = jfunc_rdesc_usable (jfunc))
2652 && --rdesc->refcount == 0)
2653 {
2654 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2655 if (!symbol)
2656 return false;
2657
2658 return remove_described_reference (symbol, rdesc);
2659 }
2660 return true;
2661 }
2662
2663 /* Try to find a destination for indirect edge IE that corresponds to a simple
2664 call or a call of a member function pointer and where the destination is a
2665 pointer formal parameter described by jump function JFUNC. If it can be
2666 determined, return the newly direct edge, otherwise return NULL.
2667 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2668
2669 static struct cgraph_edge *
2670 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2671 struct ipa_jump_func *jfunc,
2672 struct ipa_node_params *new_root_info)
2673 {
2674 struct cgraph_edge *cs;
2675 tree target;
2676 bool agg_contents = ie->indirect_info->agg_contents;
2677
2678 if (ie->indirect_info->agg_contents)
2679 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2680 ie->indirect_info->offset,
2681 ie->indirect_info->by_ref);
2682 else
2683 target = ipa_value_from_jfunc (new_root_info, jfunc);
2684 if (!target)
2685 return NULL;
2686 cs = ipa_make_edge_direct_to_target (ie, target);
2687
2688 if (cs && !agg_contents)
2689 {
2690 bool ok;
2691 gcc_checking_assert (cs->callee
2692 && (cs != ie
2693 || jfunc->type != IPA_JF_CONST
2694 || !cgraph_node_for_jfunc (jfunc)
2695 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2696 ok = try_decrement_rdesc_refcount (jfunc);
2697 gcc_checking_assert (ok);
2698 }
2699
2700 return cs;
2701 }
2702
2703 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2704 call based on a formal parameter which is described by jump function JFUNC
2705 and if it can be determined, make it direct and return the direct edge.
2706 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2707 are relative to. */
2708
2709 static struct cgraph_edge *
2710 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2711 struct ipa_jump_func *jfunc,
2712 struct ipa_node_params *new_root_info)
2713 {
2714 tree binfo, target;
2715
2716 if (!flag_devirtualize)
2717 return NULL;
2718
2719 /* First try to do lookup via known virtual table pointer value. */
2720 if (!ie->indirect_info->by_ref)
2721 {
2722 tree vtable;
2723 unsigned HOST_WIDE_INT offset;
2724 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2725 ie->indirect_info->offset,
2726 true);
2727 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2728 {
2729 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2730 vtable, offset);
2731 if (target)
2732 {
2733 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2734 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2735 || !possible_polymorphic_call_target_p
2736 (ie, cgraph_get_node (target)))
2737 {
2738 if (dump_file)
2739 fprintf (dump_file,
2740 "Type inconsident devirtualization: %s/%i->%s\n",
2741 ie->caller->name (), ie->caller->order,
2742 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2743 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2744 cgraph_get_create_node (target);
2745 }
2746 return ipa_make_edge_direct_to_target (ie, target);
2747 }
2748 }
2749 }
2750
2751 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2752
2753 if (!binfo)
2754 return NULL;
2755
2756 if (TREE_CODE (binfo) != TREE_BINFO)
2757 {
2758 ipa_polymorphic_call_context context;
2759 vec <cgraph_node *>targets;
2760 bool final;
2761
2762 if (!get_polymorphic_call_info_from_invariant
2763 (&context, binfo, ie->indirect_info->otr_type,
2764 ie->indirect_info->offset))
2765 return NULL;
2766 targets = possible_polymorphic_call_targets
2767 (ie->indirect_info->otr_type,
2768 ie->indirect_info->otr_token,
2769 context, &final);
2770 if (!final || targets.length () > 1)
2771 return NULL;
2772 if (targets.length () == 1)
2773 target = targets[0]->decl;
2774 else
2775 {
2776 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2777 cgraph_get_create_node (target);
2778 }
2779 }
2780 else
2781 {
2782 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2783 ie->indirect_info->otr_type);
2784 if (binfo)
2785 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2786 binfo);
2787 else
2788 return NULL;
2789 }
2790
2791 if (target)
2792 {
2793 #ifdef ENABLE_CHECKING
2794 gcc_assert (possible_polymorphic_call_target_p
2795 (ie, cgraph_get_node (target)));
2796 #endif
2797 return ipa_make_edge_direct_to_target (ie, target);
2798 }
2799 else
2800 return NULL;
2801 }
2802
2803 /* Update the param called notes associated with NODE when CS is being inlined,
2804 assuming NODE is (potentially indirectly) inlined into CS->callee.
2805 Moreover, if the callee is discovered to be constant, create a new cgraph
2806 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2807 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2808
2809 static bool
2810 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2811 struct cgraph_node *node,
2812 vec<cgraph_edge_p> *new_edges)
2813 {
2814 struct ipa_edge_args *top;
2815 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2816 struct ipa_node_params *new_root_info;
2817 bool res = false;
2818
2819 ipa_check_create_edge_args ();
2820 top = IPA_EDGE_REF (cs);
2821 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2822 ? cs->caller->global.inlined_to
2823 : cs->caller);
2824
2825 for (ie = node->indirect_calls; ie; ie = next_ie)
2826 {
2827 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2828 struct ipa_jump_func *jfunc;
2829 int param_index;
2830
2831 next_ie = ie->next_callee;
2832
2833 if (ici->param_index == -1)
2834 continue;
2835
2836 /* We must check the range due to calls with a variable number of arguments. */
2837 if (ici->param_index >= ipa_get_cs_argument_count (top))
2838 {
2839 ici->param_index = -1;
2840 continue;
2841 }
2842
2843 param_index = ici->param_index;
2844 jfunc = ipa_get_ith_jump_func (top, param_index);
2845
2846 if (!flag_indirect_inlining)
2847 new_direct_edge = NULL;
2848 else if (ici->polymorphic)
2849 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2850 new_root_info);
2851 else
2852 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2853 new_root_info);
2854 /* If speculation was removed, then we need to do nothing. */
2855 if (new_direct_edge && new_direct_edge != ie)
2856 {
2857 new_direct_edge->indirect_inlining_edge = 1;
2858 top = IPA_EDGE_REF (cs);
2859 res = true;
2860 }
2861 else if (new_direct_edge)
2862 {
2863 new_direct_edge->indirect_inlining_edge = 1;
2864 if (new_direct_edge->call_stmt)
2865 new_direct_edge->call_stmt_cannot_inline_p
2866 = !gimple_check_call_matching_types (
2867 new_direct_edge->call_stmt,
2868 new_direct_edge->callee->decl, false);
2869 if (new_edges)
2870 {
2871 new_edges->safe_push (new_direct_edge);
2872 res = true;
2873 }
2874 top = IPA_EDGE_REF (cs);
2875 }
2876 else if (jfunc->type == IPA_JF_PASS_THROUGH
2877 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2878 {
2879 if (ici->agg_contents
2880 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2881 ici->param_index = -1;
2882 else
2883 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2884 }
2885 else if (jfunc->type == IPA_JF_ANCESTOR)
2886 {
2887 if (ici->agg_contents
2888 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2889 ici->param_index = -1;
2890 else
2891 {
2892 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2893 if (ipa_get_jf_ancestor_offset (jfunc))
2894 ici->outer_type = NULL;
2895 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2896 }
2897 }
2898 else
2899 /* Either we can find a destination for this edge now or never. */
2900 ici->param_index = -1;
2901 }
2902
2903 return res;
2904 }
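
/* To illustrate the parameter-index remapping above (hypothetical scenario):
   if the just-inlined callee contained an indirect call through its
   parameter 3, and the jump function for argument 3 of the inlined call site
   is a simple pass-through of the caller's parameter 1, the indirect call is
   now described as going through parameter 1 of the new root, so later
   propagation can still attempt to resolve it. */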
2905
2906 /* Recursively traverse subtree of NODE (including node) made of inlined
2907 cgraph_edges when CS has been inlined and invoke
2908 update_indirect_edges_after_inlining on all nodes and
2909 update_jump_functions_after_inlining on all non-inlined edges that lead out
2910 of this subtree. Newly discovered indirect edges will be added to
2911 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2912 created. */
2913
2914 static bool
2915 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2916 struct cgraph_node *node,
2917 vec<cgraph_edge_p> *new_edges)
2918 {
2919 struct cgraph_edge *e;
2920 bool res;
2921
2922 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2923
2924 for (e = node->callees; e; e = e->next_callee)
2925 if (!e->inline_failed)
2926 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2927 else
2928 update_jump_functions_after_inlining (cs, e);
2929 for (e = node->indirect_calls; e; e = e->next_callee)
2930 update_jump_functions_after_inlining (cs, e);
2931
2932 return res;
2933 }
2934
2935 /* Combine two controlled uses counts as done during inlining. */
2936
2937 static int
2938 combine_controlled_uses_counters (int c, int d)
2939 {
2940 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2941 return IPA_UNDESCRIBED_USE;
2942 else
2943 return c + d - 1;
2944 }
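
/* An illustrative sanity check of the formula above: if the caller had c = 3
   controlled uses of a value it also passed to the callee, and the callee's
   parameter had d = 2 controlled uses, then after inlining the use that was
   the call argument itself disappears, leaving 3 + 2 - 1 = 4 controlled uses
   at the new root. Any IPA_UNDESCRIBED_USE input keeps the result
   undescribed. */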
2945
2946 /* Propagate the number of controlled users from CS->callee to the new root
2947 of the tree of inlined nodes. */
2948
2949 static void
2950 propagate_controlled_uses (struct cgraph_edge *cs)
2951 {
2952 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2953 struct cgraph_node *new_root = cs->caller->global.inlined_to
2954 ? cs->caller->global.inlined_to : cs->caller;
2955 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2956 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2957 int count, i;
2958
2959 count = MIN (ipa_get_cs_argument_count (args),
2960 ipa_get_param_count (old_root_info));
2961 for (i = 0; i < count; i++)
2962 {
2963 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2964 struct ipa_cst_ref_desc *rdesc;
2965
2966 if (jf->type == IPA_JF_PASS_THROUGH)
2967 {
2968 int src_idx, c, d;
2969 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2970 c = ipa_get_controlled_uses (new_root_info, src_idx);
2971 d = ipa_get_controlled_uses (old_root_info, i);
2972
2973 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2974 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2975 c = combine_controlled_uses_counters (c, d);
2976 ipa_set_controlled_uses (new_root_info, src_idx, c);
2977 if (c == 0 && new_root_info->ipcp_orig_node)
2978 {
2979 struct cgraph_node *n;
2980 struct ipa_ref *ref;
2981 tree t = new_root_info->known_vals[src_idx];
2982
2983 if (t && TREE_CODE (t) == ADDR_EXPR
2984 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2985 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2986 && (ref = ipa_find_reference (new_root,
2987 n, NULL, 0)))
2988 {
2989 if (dump_file)
2990 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2991 "reference from %s/%i to %s/%i.\n",
2992 xstrdup (new_root->name ()),
2993 new_root->order,
2994 xstrdup (n->name ()), n->order);
2995 ipa_remove_reference (ref);
2996 }
2997 }
2998 }
2999 else if (jf->type == IPA_JF_CONST
3000 && (rdesc = jfunc_rdesc_usable (jf)))
3001 {
3002 int d = ipa_get_controlled_uses (old_root_info, i);
3003 int c = rdesc->refcount;
3004 rdesc->refcount = combine_controlled_uses_counters (c, d);
3005 if (rdesc->refcount == 0)
3006 {
3007 tree cst = ipa_get_jf_constant (jf);
3008 struct cgraph_node *n;
3009 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3010 && TREE_CODE (TREE_OPERAND (cst, 0))
3011 == FUNCTION_DECL);
3012 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3013 if (n)
3014 {
3015 struct cgraph_node *clone;
3016 bool ok;
3017 ok = remove_described_reference (n, rdesc);
3018 gcc_checking_assert (ok);
3019
3020 clone = cs->caller;
3021 while (clone->global.inlined_to
3022 && clone != rdesc->cs->caller
3023 && IPA_NODE_REF (clone)->ipcp_orig_node)
3024 {
3025 struct ipa_ref *ref;
3026 ref = ipa_find_reference (clone,
3027 n, NULL, 0);
3028 if (ref)
3029 {
3030 if (dump_file)
3031 fprintf (dump_file, "ipa-prop: Removing "
3032 "cloning-created reference "
3033 "from %s/%i to %s/%i.\n",
3034 xstrdup (clone->name ()),
3035 clone->order,
3036 xstrdup (n->name ()),
3037 n->order);
3038 ipa_remove_reference (ref);
3039 }
3040 clone = clone->callers->caller;
3041 }
3042 }
3043 }
3044 }
3045 }
3046
3047 for (i = ipa_get_param_count (old_root_info);
3048 i < ipa_get_cs_argument_count (args);
3049 i++)
3050 {
3051 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3052
3053 if (jf->type == IPA_JF_CONST)
3054 {
3055 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3056 if (rdesc)
3057 rdesc->refcount = IPA_UNDESCRIBED_USE;
3058 }
3059 else if (jf->type == IPA_JF_PASS_THROUGH)
3060 ipa_set_controlled_uses (new_root_info,
3061 jf->value.pass_through.formal_id,
3062 IPA_UNDESCRIBED_USE);
3063 }
3064 }
3065
3066 /* Update jump functions and call note functions on inlining the call site CS.
3067 CS is expected to lead to a node already cloned by
3068 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3069 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3070 created. */
3071
3072 bool
3073 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3074 vec<cgraph_edge_p> *new_edges)
3075 {
3076 bool changed;
3077 /* Do nothing if the preparation phase has not been carried out yet
3078 (i.e. during early inlining). */
3079 if (!ipa_node_params_vector.exists ())
3080 return false;
3081 gcc_assert (ipa_edge_args_vector);
3082
3083 propagate_controlled_uses (cs);
3084 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3085
3086 return changed;
3087 }
3088
3089 /* Frees all dynamically allocated structures that the argument info points
3090 to. */
3091
3092 void
3093 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3094 {
3095 vec_free (args->jump_functions);
3096 memset (args, 0, sizeof (*args));
3097 }
3098
3099 /* Free all ipa_edge structures. */
3100
3101 void
3102 ipa_free_all_edge_args (void)
3103 {
3104 int i;
3105 struct ipa_edge_args *args;
3106
3107 if (!ipa_edge_args_vector)
3108 return;
3109
3110 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3111 ipa_free_edge_args_substructures (args);
3112
3113 vec_free (ipa_edge_args_vector);
3114 }
3115
3116 /* Frees all dynamically allocated structures that the param info points
3117 to. */
3118
3119 void
3120 ipa_free_node_params_substructures (struct ipa_node_params *info)
3121 {
3122 info->descriptors.release ();
3123 free (info->lattices);
3124 /* Lattice values and their sources are deallocated with their allocation
3125 pool. */
3126 info->known_vals.release ();
3127 memset (info, 0, sizeof (*info));
3128 }
3129
3130 /* Free all ipa_node_params structures. */
3131
3132 void
3133 ipa_free_all_node_params (void)
3134 {
3135 int i;
3136 struct ipa_node_params *info;
3137
3138 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3139 ipa_free_node_params_substructures (info);
3140
3141 ipa_node_params_vector.release ();
3142 }
3143
3144 /* Set the aggregate replacements of NODE to be AGGVALS. */
3145
3146 void
3147 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3148 struct ipa_agg_replacement_value *aggvals)
3149 {
3150 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3151 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3152
3153 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3154 }
3155
3156 /* Hook that is called by cgraph.c when an edge is removed. */
3157
3158 static void
3159 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3160 {
3161 struct ipa_edge_args *args;
3162
3163 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3164 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3165 return;
3166
3167 args = IPA_EDGE_REF (cs);
3168 if (args->jump_functions)
3169 {
3170 struct ipa_jump_func *jf;
3171 int i;
3172 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3173 {
3174 struct ipa_cst_ref_desc *rdesc;
3175 try_decrement_rdesc_refcount (jf);
3176 if (jf->type == IPA_JF_CONST
3177 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3178 && rdesc->cs == cs)
3179 rdesc->cs = NULL;
3180 }
3181 }
3182
3183 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3184 }
3185
3186 /* Hook that is called by cgraph.c when a node is removed. */
3187
3188 static void
3189 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3190 {
3191 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3192 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3193 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3194 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3195 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3196 }
3197
3198 /* Hook that is called by cgraph.c when an edge is duplicated. */
3199
3200 static void
3201 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3202 __attribute__((unused)) void *data)
3203 {
3204 struct ipa_edge_args *old_args, *new_args;
3205 unsigned int i;
3206
3207 ipa_check_create_edge_args ();
3208
3209 old_args = IPA_EDGE_REF (src);
3210 new_args = IPA_EDGE_REF (dst);
3211
3212 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3213
3214 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3215 {
3216 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3217 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3218
3219 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3220
3221 if (src_jf->type == IPA_JF_CONST)
3222 {
3223 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3224
3225 if (!src_rdesc)
3226 dst_jf->value.constant.rdesc = NULL;
3227 else if (src->caller == dst->caller)
3228 {
3229 struct ipa_ref *ref;
3230 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3231 gcc_checking_assert (n);
3232 ref = ipa_find_reference (src->caller, n,
3233 src->call_stmt, src->lto_stmt_uid);
3234 gcc_checking_assert (ref);
3235 ipa_clone_ref (ref, dst->caller, ref->stmt);
3236
3237 gcc_checking_assert (ipa_refdesc_pool);
3238 struct ipa_cst_ref_desc *dst_rdesc
3239 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3240 dst_rdesc->cs = dst;
3241 dst_rdesc->refcount = src_rdesc->refcount;
3242 dst_rdesc->next_duplicate = NULL;
3243 dst_jf->value.constant.rdesc = dst_rdesc;
3244 }
3245 else if (src_rdesc->cs == src)
3246 {
3247 struct ipa_cst_ref_desc *dst_rdesc;
3248 gcc_checking_assert (ipa_refdesc_pool);
3249 dst_rdesc
3250 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3251 dst_rdesc->cs = dst;
3252 dst_rdesc->refcount = src_rdesc->refcount;
3253 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3254 src_rdesc->next_duplicate = dst_rdesc;
3255 dst_jf->value.constant.rdesc = dst_rdesc;
3256 }
3257 else
3258 {
3259 struct ipa_cst_ref_desc *dst_rdesc;
3260 /* This can happen during inlining, when a JFUNC can refer to a
3261 reference taken in a function up in the tree of inline clones.
3262 We need to find the duplicate that refers to our tree of
3263 inline clones. */
3264
3265 gcc_assert (dst->caller->global.inlined_to);
3266 for (dst_rdesc = src_rdesc->next_duplicate;
3267 dst_rdesc;
3268 dst_rdesc = dst_rdesc->next_duplicate)
3269 {
3270 struct cgraph_node *top;
3271 top = dst_rdesc->cs->caller->global.inlined_to
3272 ? dst_rdesc->cs->caller->global.inlined_to
3273 : dst_rdesc->cs->caller;
3274 if (dst->caller->global.inlined_to == top)
3275 break;
3276 }
3277 gcc_assert (dst_rdesc);
3278 dst_jf->value.constant.rdesc = dst_rdesc;
3279 }
3280 }
3281 }
3282 }
3283
3284 /* Hook that is called by cgraph.c when a node is duplicated. */
3285
3286 static void
3287 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3288 ATTRIBUTE_UNUSED void *data)
3289 {
3290 struct ipa_node_params *old_info, *new_info;
3291 struct ipa_agg_replacement_value *old_av, *new_av;
3292
3293 ipa_check_create_node_params ();
3294 old_info = IPA_NODE_REF (src);
3295 new_info = IPA_NODE_REF (dst);
3296
3297 new_info->descriptors = old_info->descriptors.copy ();
3298 new_info->lattices = NULL;
3299 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3300
3301 new_info->uses_analysis_done = old_info->uses_analysis_done;
3302 new_info->node_enqueued = old_info->node_enqueued;
3303
3304 old_av = ipa_get_agg_replacements_for_node (src);
3305 if (!old_av)
3306 return;
3307
3308 new_av = NULL;
3309 while (old_av)
3310 {
3311 struct ipa_agg_replacement_value *v;
3312
3313 v = ggc_alloc_ipa_agg_replacement_value ();
3314 memcpy (v, old_av, sizeof (*v));
3315 v->next = new_av;
3316 new_av = v;
3317 old_av = old_av->next;
3318 }
3319 ipa_set_node_agg_value_chain (dst, new_av);
3320 }
3321
3322
3323 /* Analyze a function newly added to the callgraph. */
3324
3325 static void
3326 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3327 {
3328 if (cgraph_function_with_gimple_body_p (node))
3329 ipa_analyze_node (node);
3330 }
3331
3332 /* Register our cgraph hooks if they are not already there. */
3333
3334 void
3335 ipa_register_cgraph_hooks (void)
3336 {
3337 if (!edge_removal_hook_holder)
3338 edge_removal_hook_holder =
3339 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3340 if (!node_removal_hook_holder)
3341 node_removal_hook_holder =
3342 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3343 if (!edge_duplication_hook_holder)
3344 edge_duplication_hook_holder =
3345 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3346 if (!node_duplication_hook_holder)
3347 node_duplication_hook_holder =
3348 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3349 function_insertion_hook_holder =
3350 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3351 }
3352
3353 /* Unregister our cgraph hooks. */
3354
3355 static void
3356 ipa_unregister_cgraph_hooks (void)
3357 {
3358 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3359 edge_removal_hook_holder = NULL;
3360 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3361 node_removal_hook_holder = NULL;
3362 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3363 edge_duplication_hook_holder = NULL;
3364 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3365 node_duplication_hook_holder = NULL;
3366 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3367 function_insertion_hook_holder = NULL;
3368 }
3369
3370 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3371 longer needed after ipa-cp. */
3372
3373 void
3374 ipa_free_all_structures_after_ipa_cp (void)
3375 {
3376 if (!optimize)
3377 {
3378 ipa_free_all_edge_args ();
3379 ipa_free_all_node_params ();
3380 free_alloc_pool (ipcp_sources_pool);
3381 free_alloc_pool (ipcp_values_pool);
3382 free_alloc_pool (ipcp_agg_lattice_pool);
3383 ipa_unregister_cgraph_hooks ();
3384 if (ipa_refdesc_pool)
3385 free_alloc_pool (ipa_refdesc_pool);
3386 }
3387 }
3388
3389 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3390 longer needed after indirect inlining. */
3391
3392 void
3393 ipa_free_all_structures_after_iinln (void)
3394 {
3395 ipa_free_all_edge_args ();
3396 ipa_free_all_node_params ();
3397 ipa_unregister_cgraph_hooks ();
3398 if (ipcp_sources_pool)
3399 free_alloc_pool (ipcp_sources_pool);
3400 if (ipcp_values_pool)
3401 free_alloc_pool (ipcp_values_pool);
3402 if (ipcp_agg_lattice_pool)
3403 free_alloc_pool (ipcp_agg_lattice_pool);
3404 if (ipa_refdesc_pool)
3405 free_alloc_pool (ipa_refdesc_pool);
3406 }
3407
3408 /* Print ipa_tree_map data structures of function NODE to F. */
3410
3411 void
3412 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3413 {
3414 int i, count;
3415 struct ipa_node_params *info;
3416
3417 if (!node->definition)
3418 return;
3419 info = IPA_NODE_REF (node);
3420 fprintf (f, " function %s/%i parameter descriptors:\n",
3421 node->name (), node->order);
3422 count = ipa_get_param_count (info);
3423 for (i = 0; i < count; i++)
3424 {
3425 int c;
3426
3427 fprintf (f, " ");
3428 ipa_dump_param (f, info, i);
3429 if (ipa_is_param_used (info, i))
3430 fprintf (f, " used");
3431 c = ipa_get_controlled_uses (info, i);
3432 if (c == IPA_UNDESCRIBED_USE)
3433 fprintf (f, " undescribed_use");
3434 else
3435 fprintf (f, " controlled_uses=%i", c);
3436 fprintf (f, "\n");
3437 }
3438 }
3439
3440 /* Print ipa_tree_map data structures of all functions in the
3441 callgraph to F. */
3442
3443 void
3444 ipa_print_all_params (FILE * f)
3445 {
3446 struct cgraph_node *node;
3447
3448 fprintf (f, "\nFunction parameters:\n");
3449 FOR_EACH_FUNCTION (node)
3450 ipa_print_node_params (f, node);
3451 }
3452
3453 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3454
3455 vec<tree>
3456 ipa_get_vector_of_formal_parms (tree fndecl)
3457 {
3458 vec<tree> args;
3459 int count;
3460 tree parm;
3461
3462 gcc_assert (!flag_wpa);
3463 count = count_formal_params (fndecl);
3464 args.create (count);
3465 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3466 args.quick_push (parm);
3467
3468 return args;
3469 }
3470
3471 /* Return a heap allocated vector containing types of formal parameters of
3472 function type FNTYPE. */
3473
3474 vec<tree>
3475 ipa_get_vector_of_formal_parm_types (tree fntype)
3476 {
3477 vec<tree> types;
3478 int count = 0;
3479 tree t;
3480
3481 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3482 count++;
3483
3484 types.create (count);
3485 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3486 types.quick_push (TREE_VALUE (t));
3487
3488 return types;
3489 }
3490
3491 /* Modify the function declaration FNDECL and its type according to the plan
3492 in ADJUSTMENTS. Also set the base fields of the individual adjustment
3493 structures to point to the actual parameters being modified, which are
3494 determined by their base_index fields. */
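
/* For example (purely illustrative), splitting the second parameter of

     void foo (int a, struct pair *p);   - with struct pair { int x; int y; }

   into its scalar components could be described by an IPA_PARM_OP_COPY
   adjustment with base_index 0 and two further adjustments of type int with
   base_index 1, after which the declaration reads roughly

     void foo (int a, int p$x, int p$y);  */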
3495
3496 void
3497 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3498 {
3499 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3500 tree orig_type = TREE_TYPE (fndecl);
3501 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3502
3503 /* The following test is an ugly hack; some functions simply don't have any
3504 arguments in their type. This is probably a bug but well... */
3505 bool care_for_types = (old_arg_types != NULL_TREE);
3506 bool last_parm_void;
3507 vec<tree> otypes;
3508 if (care_for_types)
3509 {
3510 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3511 == void_type_node);
3512 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3513 if (last_parm_void)
3514 gcc_assert (oparms.length () + 1 == otypes.length ());
3515 else
3516 gcc_assert (oparms.length () == otypes.length ());
3517 }
3518 else
3519 {
3520 last_parm_void = false;
3521 otypes.create (0);
3522 }
3523
3524 int len = adjustments.length ();
3525 tree *link = &DECL_ARGUMENTS (fndecl);
3526 tree new_arg_types = NULL;
3527 for (int i = 0; i < len; i++)
3528 {
3529 struct ipa_parm_adjustment *adj;
3530 gcc_assert (link);
3531
3532 adj = &adjustments[i];
3533 tree parm;
3534 if (adj->op == IPA_PARM_OP_NEW)
3535 parm = NULL;
3536 else
3537 parm = oparms[adj->base_index];
3538 adj->base = parm;
3539
3540 if (adj->op == IPA_PARM_OP_COPY)
3541 {
3542 if (care_for_types)
3543 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3544 new_arg_types);
3545 *link = parm;
3546 link = &DECL_CHAIN (parm);
3547 }
3548 else if (adj->op != IPA_PARM_OP_REMOVE)
3549 {
3550 tree new_parm;
3551 tree ptype;
3552
3553 if (adj->by_ref)
3554 ptype = build_pointer_type (adj->type);
3555 else
3556 {
3557 ptype = adj->type;
3558 if (is_gimple_reg_type (ptype))
3559 {
3560 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3561 if (TYPE_ALIGN (ptype) < malign)
3562 ptype = build_aligned_type (ptype, malign);
3563 }
3564 }
3565
3566 if (care_for_types)
3567 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3568
3569 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3570 ptype);
3571 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3572 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3573 DECL_ARTIFICIAL (new_parm) = 1;
3574 DECL_ARG_TYPE (new_parm) = ptype;
3575 DECL_CONTEXT (new_parm) = fndecl;
3576 TREE_USED (new_parm) = 1;
3577 DECL_IGNORED_P (new_parm) = 1;
3578 layout_decl (new_parm, 0);
3579
3580 if (adj->op == IPA_PARM_OP_NEW)
3581 adj->base = NULL;
3582 else
3583 adj->base = parm;
3584 adj->new_decl = new_parm;
3585
3586 *link = new_parm;
3587 link = &DECL_CHAIN (new_parm);
3588 }
3589 }
3590
3591 *link = NULL_TREE;
3592
3593 tree new_reversed = NULL;
3594 if (care_for_types)
3595 {
3596 new_reversed = nreverse (new_arg_types);
3597 if (last_parm_void)
3598 {
3599 if (new_reversed)
3600 TREE_CHAIN (new_arg_types) = void_list_node;
3601 else
3602 new_reversed = void_list_node;
3603 }
3604 }
3605
3606 /* Use copy_node to preserve as much as possible from the original type
3607 (debug info, attribute lists etc.).
3608 The exception is METHOD_TYPEs, which must have a THIS argument;
3609 when we are asked to remove it, we need to build a new FUNCTION_TYPE
3610 instead. */
3611 tree new_type = NULL;
3612 if (TREE_CODE (orig_type) != METHOD_TYPE
3613 || (adjustments[0].op == IPA_PARM_OP_COPY
3614 && adjustments[0].base_index == 0))
3615 {
3616 new_type = build_distinct_type_copy (orig_type);
3617 TYPE_ARG_TYPES (new_type) = new_reversed;
3618 }
3619 else
3620 {
3621 new_type
3622 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3623 new_reversed));
3624 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3625 DECL_VINDEX (fndecl) = NULL_TREE;
3626 }
3627
3628 /* When the signature changes, we need to clear builtin info. */
3629 if (DECL_BUILT_IN (fndecl))
3630 {
3631 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3632 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3633 }
3634
3635 /* This is a new type, not a copy of an old type, so we need to reassociate
3636 variants. We can handle everything except the main variant lazily. */
3637 tree t = TYPE_MAIN_VARIANT (orig_type);
3638 if (orig_type != t)
3639 {
3640 TYPE_MAIN_VARIANT (new_type) = t;
3641 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3642 TYPE_NEXT_VARIANT (t) = new_type;
3643 }
3644 else
3645 {
3646 TYPE_MAIN_VARIANT (new_type) = new_type;
3647 TYPE_NEXT_VARIANT (new_type) = NULL;
3648 }
3649
3650 TREE_TYPE (fndecl) = new_type;
3651 DECL_VIRTUAL_P (fndecl) = 0;
3652 otypes.release ();
3653 oparms.release ();
3654 }
3655
3656 /* Modify actual arguments of the function call STMT as indicated in
3657 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
3658 Otherwise it must contain the corresponding call graph edge. */
3659
3660 void
3661 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3662 ipa_parm_adjustment_vec adjustments)
3663 {
3664 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3665 vec<tree> vargs;
3666 vec<tree, va_gc> **debug_args = NULL;
3667 gimple new_stmt;
3668 gimple_stmt_iterator gsi, prev_gsi;
3669 tree callee_decl;
3670 int i, len;
3671
3672 len = adjustments.length ();
3673 vargs.create (len);
3674 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3675 ipa_remove_stmt_references (current_node, stmt);
3676
3677 gsi = gsi_for_stmt (stmt);
3678 prev_gsi = gsi;
3679 gsi_prev (&prev_gsi);
3680 for (i = 0; i < len; i++)
3681 {
3682 struct ipa_parm_adjustment *adj;
3683
3684 adj = &adjustments[i];
3685
3686 if (adj->op == IPA_PARM_OP_COPY)
3687 {
3688 tree arg = gimple_call_arg (stmt, adj->base_index);
3689
3690 vargs.quick_push (arg);
3691 }
3692 else if (adj->op != IPA_PARM_OP_REMOVE)
3693 {
3694 tree expr, base, off;
3695 location_t loc;
3696 unsigned int deref_align = 0;
3697 bool deref_base = false;
3698
3699 /* We create a new parameter out of the value of the old one; we can
3700 do the following kinds of transformations:
3701
3702 - A scalar passed by reference is converted to a scalar passed by
3703 value. (adj->by_ref is false and the type of the original
3704 actual argument is a pointer to a scalar).
3705
3706 - A part of an aggregate is passed instead of the whole aggregate.
3707 The part can be passed either by value or by reference, this is
3708 determined by value of adj->by_ref. Moreover, the code below
3709 handles both situations when the original aggregate is passed by
3710 value (its type is not a pointer) and when it is passed by
3711 reference (it is a pointer to an aggregate).
3712
3713 When the new argument is passed by reference (adj->by_ref is true)
3714 it must be a part of an aggregate and therefore we form it by
3715 simply taking the address of a reference inside the original
3716 aggregate. */
3717
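/* A hypothetical illustration: if the callee used to take "struct S *p"
and now takes only the member "p->f" by value, a call "bar (&s)" is
rewritten below so that the new actual argument becomes a load of the
form MEM[(struct S *)&s + offset-of-f]. */
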
3718 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3719 base = gimple_call_arg (stmt, adj->base_index);
3720 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3721 : EXPR_LOCATION (base);
3722
3723 if (TREE_CODE (base) != ADDR_EXPR
3724 && POINTER_TYPE_P (TREE_TYPE (base)))
3725 off = build_int_cst (adj->alias_ptr_type,
3726 adj->offset / BITS_PER_UNIT);
3727 else
3728 {
3729 HOST_WIDE_INT base_offset;
3730 tree prev_base;
3731 bool addrof;
3732
3733 if (TREE_CODE (base) == ADDR_EXPR)
3734 {
3735 base = TREE_OPERAND (base, 0);
3736 addrof = true;
3737 }
3738 else
3739 addrof = false;
3740 prev_base = base;
3741 base = get_addr_base_and_unit_offset (base, &base_offset);
3742 /* Aggregate arguments can have non-invariant addresses. */
3743 if (!base)
3744 {
3745 base = build_fold_addr_expr (prev_base);
3746 off = build_int_cst (adj->alias_ptr_type,
3747 adj->offset / BITS_PER_UNIT);
3748 }
3749 else if (TREE_CODE (base) == MEM_REF)
3750 {
3751 if (!addrof)
3752 {
3753 deref_base = true;
3754 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3755 }
3756 off = build_int_cst (adj->alias_ptr_type,
3757 base_offset
3758 + adj->offset / BITS_PER_UNIT);
3759 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3760 off);
3761 base = TREE_OPERAND (base, 0);
3762 }
3763 else
3764 {
3765 off = build_int_cst (adj->alias_ptr_type,
3766 base_offset
3767 + adj->offset / BITS_PER_UNIT);
3768 base = build_fold_addr_expr (base);
3769 }
3770 }
3771
3772 if (!adj->by_ref)
3773 {
3774 tree type = adj->type;
3775 unsigned int align;
3776 unsigned HOST_WIDE_INT misalign;
3777
3778 if (deref_base)
3779 {
3780 align = deref_align;
3781 misalign = 0;
3782 }
3783 else
3784 {
3785 get_pointer_alignment_1 (base, &align, &misalign);
3786 if (TYPE_ALIGN (type) > align)
3787 align = TYPE_ALIGN (type);
3788 }
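/* Fold the constant byte offset OFF (converted to bits) into the known
misalignment. Only the bits below ALIGN matter, and if the result is
still misaligned, its lowest set bit bounds the alignment we may
assume for the access. */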
3789 misalign += (tree_to_double_int (off)
3790 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3791 * BITS_PER_UNIT);
3792 misalign = misalign & (align - 1);
3793 if (misalign != 0)
3794 align = (misalign & -misalign);
3795 if (align < TYPE_ALIGN (type))
3796 type = build_aligned_type (type, align);
3797 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3798 }
3799 else
3800 {
3801 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3802 expr = build_fold_addr_expr (expr);
3803 }
3804
3805 expr = force_gimple_operand_gsi (&gsi, expr,
3806 adj->by_ref
3807 || is_gimple_reg_type (adj->type),
3808 NULL, true, GSI_SAME_STMT);
3809 vargs.quick_push (expr);
3810 }
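/* For arguments that are not simply copied, try to keep their value
visible to the debugger by binding it to a DEBUG_EXPR_DECL recorded
in the callee's debug-args list. */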
3811 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3812 {
3813 unsigned int ix;
3814 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3815 gimple def_temp;
3816
3817 arg = gimple_call_arg (stmt, adj->base_index);
3818 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3819 {
3820 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3821 continue;
3822 arg = fold_convert_loc (gimple_location (stmt),
3823 TREE_TYPE (origin), arg);
3824 }
3825 if (debug_args == NULL)
3826 debug_args = decl_debug_args_insert (callee_decl);
3827 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3828 if (ddecl == origin)
3829 {
3830 ddecl = (**debug_args)[ix + 1];
3831 break;
3832 }
3833 if (ddecl == NULL)
3834 {
3835 ddecl = make_node (DEBUG_EXPR_DECL);
3836 DECL_ARTIFICIAL (ddecl) = 1;
3837 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3838 DECL_MODE (ddecl) = DECL_MODE (origin);
3839
3840 vec_safe_push (*debug_args, origin);
3841 vec_safe_push (*debug_args, ddecl);
3842 }
3843 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3844 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3845 }
3846 }
3847
3848 if (dump_file && (dump_flags & TDF_DETAILS))
3849 {
3850 fprintf (dump_file, "replacing stmt:");
3851 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3852 }
3853
3854 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3855 vargs.release ();
3856 if (gimple_call_lhs (stmt))
3857 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3858
3859 gimple_set_block (new_stmt, gimple_block (stmt));
3860 if (gimple_has_location (stmt))
3861 gimple_set_location (new_stmt, gimple_location (stmt));
3862 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3863 gimple_call_copy_flags (new_stmt, stmt);
3864
3865 if (dump_file && (dump_flags & TDF_DETAILS))
3866 {
3867 fprintf (dump_file, "with stmt:");
3868 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3869 fprintf (dump_file, "\n");
3870 }
3871 gsi_replace (&gsi, new_stmt, true);
3872 if (cs)
3873 cgraph_set_call_stmt (cs, new_stmt);
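/* Walk backwards over the statements that were inserted before the call
(everything between PREV_GSI and the new call statement) and record
the references they take; the original ones were removed above. */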
3874 do
3875 {
3876 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3877 gsi_prev (&gsi);
3878 }
3879 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3880 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) != gsi_stmt (prev_gsi)));
3881
3882 update_ssa (TODO_update_ssa);
3883 free_dominance_info (CDI_DOMINATORS);
3884 }
3885
3886 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3887 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3888 specifies whether the function should care about type incompatibility between
3889 the current and new expressions. If it is false, the function will leave
3890 incompatibility issues to the caller. Return true iff the expression
3891 was modified. */
3892
3893 bool
3894 ipa_modify_expr (tree *expr, bool convert,
3895 ipa_parm_adjustment_vec adjustments)
3896 {
3897 struct ipa_parm_adjustment *cand
3898 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3899 if (!cand)
3900 return false;
3901
3902 tree src;
3903 if (cand->by_ref)
3904 src = build_simple_mem_ref (cand->new_decl);
3905 else
3906 src = cand->new_decl;
3907
3908 if (dump_file && (dump_flags & TDF_DETAILS))
3909 {
3910 fprintf (dump_file, "About to replace expr ");
3911 print_generic_expr (dump_file, *expr, 0);
3912 fprintf (dump_file, " with ");
3913 print_generic_expr (dump_file, src, 0);
3914 fprintf (dump_file, "\n");
3915 }
3916
3917 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3918 {
3919 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3920 *expr = vce;
3921 }
3922 else
3923 *expr = src;
3924 return true;
3925 }
3926
3927 /* If T is an SSA_NAME, return NULL if it is not a default def or
3928 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3929 the base variable is always returned, regardless of whether it is a
3930 default def. Return T if it is not an SSA_NAME. */
3931
3932 static tree
3933 get_ssa_base_param (tree t, bool ignore_default_def)
3934 {
3935 if (TREE_CODE (t) == SSA_NAME)
3936 {
3937 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3938 return SSA_NAME_VAR (t);
3939 else
3940 return NULL_TREE;
3941 }
3942 return t;
3943 }
3944
3945 /* Given an expression, return an adjustment entry specifying the
3946 transformation to be done on EXPR. If no suitable adjustment entry
3947 is found, return NULL.
3948 
3949 If IGNORE_DEFAULT_DEF is set, also consider SSA_NAMEs which are not a
3950 default def; otherwise bail out on them.
3951
3952 If CONVERT is non-NULL, this function will set *CONVERT if the
3953 expression provided is a component reference. ADJUSTMENTS is the
3954 adjustments vector. */
3955
3956 ipa_parm_adjustment *
3957 ipa_get_adjustment_candidate (tree **expr, bool *convert,
3958 ipa_parm_adjustment_vec adjustments,
3959 bool ignore_default_def)
3960 {
3961 if (TREE_CODE (**expr) == BIT_FIELD_REF
3962 || TREE_CODE (**expr) == IMAGPART_EXPR
3963 || TREE_CODE (**expr) == REALPART_EXPR)
3964 {
3965 *expr = &TREE_OPERAND (**expr, 0);
3966 if (convert)
3967 *convert = true;
3968 }
3969
3970 HOST_WIDE_INT offset, size, max_size;
3971 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
3972 if (!base || size == -1 || max_size == -1)
3973 return NULL;
3974
3975 if (TREE_CODE (base) == MEM_REF)
3976 {
3977 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
3978 base = TREE_OPERAND (base, 0);
3979 }
3980
3981 base = get_ssa_base_param (base, ignore_default_def);
3982 if (!base || TREE_CODE (base) != PARM_DECL)
3983 return NULL;
3984
3985 struct ipa_parm_adjustment *cand = NULL;
3986 unsigned int len = adjustments.length ();
3987 for (unsigned i = 0; i < len; i++)
3988 {
3989 struct ipa_parm_adjustment *adj = &adjustments[i];
3990
3991 if (adj->base == base
3992 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
3993 {
3994 cand = adj;
3995 break;
3996 }
3997 }
3998
3999 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4000 return NULL;
4001 return cand;
4002 }
4003
4004 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4005
4006 static bool
4007 index_in_adjustments_multiple_times_p (int base_index,
4008 ipa_parm_adjustment_vec adjustments)
4009 {
4010 int i, len = adjustments.length ();
4011 bool one = false;
4012
4013 for (i = 0; i < len; i++)
4014 {
4015 struct ipa_parm_adjustment *adj;
4016 adj = &adjustments[i];
4017
4018 if (adj->base_index == base_index)
4019 {
4020 if (one)
4021 return true;
4022 else
4023 one = true;
4024 }
4025 }
4026 return false;
4027 }
4028
4029
4030 /* Return adjustments that should have the same effect on function parameters
4031 and call arguments as if they were first changed according to adjustments in
4032 INNER and then by adjustments in OUTER. */
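
/* A hypothetical illustration: if INNER removes parameter 1 of
"foo (a, b, c)", then base_index 1 in OUTER refers to the second
surviving parameter, i.e. the original "c". The combined vector is
expressed in the original indices and also records the removal of "b". */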
4033
4034 ipa_parm_adjustment_vec
4035 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4036 ipa_parm_adjustment_vec outer)
4037 {
4038 int i, outlen = outer.length ();
4039 int inlen = inner.length ();
4040 int removals = 0;
4041 ipa_parm_adjustment_vec adjustments, tmp;
4042
4043 tmp.create (inlen);
4044 for (i = 0; i < inlen; i++)
4045 {
4046 struct ipa_parm_adjustment *n;
4047 n = &inner[i];
4048
4049 if (n->op == IPA_PARM_OP_REMOVE)
4050 removals++;
4051 else
4052 {
4053 /* FIXME: Handling of new arguments is not implemented yet. */
4054 gcc_assert (n->op != IPA_PARM_OP_NEW);
4055 tmp.quick_push (*n);
4056 }
4057 }
4058
4059 adjustments.create (outlen + removals);
4060 for (i = 0; i < outlen; i++)
4061 {
4062 struct ipa_parm_adjustment r;
4063 struct ipa_parm_adjustment *out = &outer[i];
4064 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4065
4066 memset (&r, 0, sizeof (r));
4067 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4068 if (out->op == IPA_PARM_OP_REMOVE)
4069 {
4070 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4071 {
4072 r.op = IPA_PARM_OP_REMOVE;
4073 adjustments.quick_push (r);
4074 }
4075 continue;
4076 }
4077 else
4078 {
4079 /* FIXME: Handling of new arguments is not implemented yet. */
4080 gcc_assert (out->op != IPA_PARM_OP_NEW);
4081 }
4082
4083 r.base_index = in->base_index;
4084 r.type = out->type;
4085
4086 /* FIXME: Create nonlocal value too. */
4087
4088 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4089 r.op = IPA_PARM_OP_COPY;
4090 else if (in->op == IPA_PARM_OP_COPY)
4091 r.offset = out->offset;
4092 else if (out->op == IPA_PARM_OP_COPY)
4093 r.offset = in->offset;
4094 else
4095 r.offset = in->offset + out->offset;
4096 adjustments.quick_push (r);
4097 }
4098
4099 for (i = 0; i < inlen; i++)
4100 {
4101 struct ipa_parm_adjustment *n = &inner[i];
4102
4103 if (n->op == IPA_PARM_OP_REMOVE)
4104 adjustments.quick_push (*n);
4105 }
4106
4107 tmp.release ();
4108 return adjustments;
4109 }
4110
4111 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4112 human-friendly way, assuming they are meant to be applied to FNDECL. */
4113
4114 void
4115 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4116 tree fndecl)
4117 {
4118 int i, len = adjustments.length ();
4119 bool first = true;
4120 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4121
4122 fprintf (file, "IPA param adjustments: ");
4123 for (i = 0; i < len; i++)
4124 {
4125 struct ipa_parm_adjustment *adj;
4126 adj = &adjustments[i];
4127
4128 if (!first)
4129 fprintf (file, " ");
4130 else
4131 first = false;
4132
4133 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4134 print_generic_expr (file, parms[adj->base_index], 0);
4135 if (adj->base)
4136 {
4137 fprintf (file, ", base: ");
4138 print_generic_expr (file, adj->base, 0);
4139 }
4140 if (adj->new_decl)
4141 {
4142 fprintf (file, ", new_decl: ");
4143 print_generic_expr (file, adj->new_decl, 0);
4144 }
4145 if (adj->new_ssa_base)
4146 {
4147 fprintf (file, ", new_ssa_base: ");
4148 print_generic_expr (file, adj->new_ssa_base, 0);
4149 }
4150
4151 if (adj->op == IPA_PARM_OP_COPY)
4152 fprintf (file, ", copy_param");
4153 else if (adj->op == IPA_PARM_OP_REMOVE)
4154 fprintf (file, ", remove_param");
4155 else
4156 fprintf (file, ", offset %li", (long) adj->offset);
4157 if (adj->by_ref)
4158 fprintf (file, ", by_ref");
4159 print_node_brief (file, ", type: ", adj->type, 0);
4160 fprintf (file, "\n");
4161 }
4162 parms.release ();
4163 }
4164
4165 /* Dump the linked list of aggregate replacement values AV to F. */
4166
4167 void
4168 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4169 {
4170 bool comma = false;
4171 fprintf (f, " Aggregate replacements:");
4172 for (; av; av = av->next)
4173 {
4174 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4175 av->index, av->offset);
4176 print_generic_expr (f, av->value, 0);
4177 comma = true;
4178 }
4179 fprintf (f, "\n");
4180 }
4181
4182 /* Stream out jump function JUMP_FUNC to OB. Keep in sync with ipa_read_jump_function. */
4183
4184 static void
4185 ipa_write_jump_function (struct output_block *ob,
4186 struct ipa_jump_func *jump_func)
4187 {
4188 struct ipa_agg_jf_item *item;
4189 struct bitpack_d bp;
4190 int i, count;
4191
4192 streamer_write_uhwi (ob, jump_func->type);
4193 switch (jump_func->type)
4194 {
4195 case IPA_JF_UNKNOWN:
4196 break;
4197 case IPA_JF_KNOWN_TYPE:
4198 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4199 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4200 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4201 break;
4202 case IPA_JF_CONST:
4203 gcc_assert (
4204 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4205 stream_write_tree (ob, jump_func->value.constant.value, true);
4206 break;
4207 case IPA_JF_PASS_THROUGH:
4208 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4209 if (jump_func->value.pass_through.operation == NOP_EXPR)
4210 {
4211 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4212 bp = bitpack_create (ob->main_stream);
4213 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4214 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4215 streamer_write_bitpack (&bp);
4216 }
4217 else
4218 {
4219 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4220 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4221 }
4222 break;
4223 case IPA_JF_ANCESTOR:
4224 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4225 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4226 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4227 bp = bitpack_create (ob->main_stream);
4228 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4229 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4230 streamer_write_bitpack (&bp);
4231 break;
4232 }
4233
4234 count = vec_safe_length (jump_func->agg.items);
4235 streamer_write_uhwi (ob, count);
4236 if (count)
4237 {
4238 bp = bitpack_create (ob->main_stream);
4239 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4240 streamer_write_bitpack (&bp);
4241 }
4242
4243 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4244 {
4245 streamer_write_uhwi (ob, item->offset);
4246 stream_write_tree (ob, item->value, true);
4247 }
4248 }
4249
4250 /* Read in jump function JUMP_FUNC from IB. */
4251
4252 static void
4253 ipa_read_jump_function (struct lto_input_block *ib,
4254 struct ipa_jump_func *jump_func,
4255 struct cgraph_edge *cs,
4256 struct data_in *data_in)
4257 {
4258 enum jump_func_type jftype;
4259 enum tree_code operation;
4260 int i, count;
4261
4262 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4263 switch (jftype)
4264 {
4265 case IPA_JF_UNKNOWN:
4266 jump_func->type = IPA_JF_UNKNOWN;
4267 break;
4268 case IPA_JF_KNOWN_TYPE:
4269 {
4270 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4271 tree base_type = stream_read_tree (ib, data_in);
4272 tree component_type = stream_read_tree (ib, data_in);
4273
4274 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4275 break;
4276 }
4277 case IPA_JF_CONST:
4278 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4279 break;
4280 case IPA_JF_PASS_THROUGH:
4281 operation = (enum tree_code) streamer_read_uhwi (ib);
4282 if (operation == NOP_EXPR)
4283 {
4284 int formal_id = streamer_read_uhwi (ib);
4285 struct bitpack_d bp = streamer_read_bitpack (ib);
4286 bool agg_preserved = bp_unpack_value (&bp, 1);
4287 bool type_preserved = bp_unpack_value (&bp, 1);
4288 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4289 type_preserved);
4290 }
4291 else
4292 {
4293 tree operand = stream_read_tree (ib, data_in);
4294 int formal_id = streamer_read_uhwi (ib);
4295 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4296 operation);
4297 }
4298 break;
4299 case IPA_JF_ANCESTOR:
4300 {
4301 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4302 tree type = stream_read_tree (ib, data_in);
4303 int formal_id = streamer_read_uhwi (ib);
4304 struct bitpack_d bp = streamer_read_bitpack (ib);
4305 bool agg_preserved = bp_unpack_value (&bp, 1);
4306 bool type_preserved = bp_unpack_value (&bp, 1);
4307
4308 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4309 type_preserved);
4310 break;
4311 }
4312 }
4313
4314 count = streamer_read_uhwi (ib);
4315 vec_alloc (jump_func->agg.items, count);
4316 if (count)
4317 {
4318 struct bitpack_d bp = streamer_read_bitpack (ib);
4319 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4320 }
4321 for (i = 0; i < count; i++)
4322 {
4323 struct ipa_agg_jf_item item;
4324 item.offset = streamer_read_uhwi (ib);
4325 item.value = stream_read_tree (ib, data_in);
4326 jump_func->agg.items->quick_push (item);
4327 }
4328 }
4329
4330 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4331 relevant to indirect inlining to OB. */
4332
4333 static void
4334 ipa_write_indirect_edge_info (struct output_block *ob,
4335 struct cgraph_edge *cs)
4336 {
4337 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4338 struct bitpack_d bp;
4339
4340 streamer_write_hwi (ob, ii->param_index);
4341 streamer_write_hwi (ob, ii->offset);
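/* Keep the number and order of these packed bits in sync with
ipa_read_indirect_edge_info below. */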
4342 bp = bitpack_create (ob->main_stream);
4343 bp_pack_value (&bp, ii->polymorphic, 1);
4344 bp_pack_value (&bp, ii->agg_contents, 1);
4345 bp_pack_value (&bp, ii->member_ptr, 1);
4346 bp_pack_value (&bp, ii->by_ref, 1);
4347 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4348 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4349 streamer_write_bitpack (&bp);
4350
4351 if (ii->polymorphic)
4352 {
4353 streamer_write_hwi (ob, ii->otr_token);
4354 stream_write_tree (ob, ii->otr_type, true);
4355 stream_write_tree (ob, ii->outer_type, true);
4356 }
4357 }
4358
4359 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4360 relevant to indirect inlining from IB. */
4361
4362 static void
4363 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4364 struct data_in *data_in ATTRIBUTE_UNUSED,
4365 struct cgraph_edge *cs)
4366 {
4367 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4368 struct bitpack_d bp;
4369
4370 ii->param_index = (int) streamer_read_hwi (ib);
4371 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4372 bp = streamer_read_bitpack (ib);
4373 ii->polymorphic = bp_unpack_value (&bp, 1);
4374 ii->agg_contents = bp_unpack_value (&bp, 1);
4375 ii->member_ptr = bp_unpack_value (&bp, 1);
4376 ii->by_ref = bp_unpack_value (&bp, 1);
4377 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4378 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4379 if (ii->polymorphic)
4380 {
4381 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4382 ii->otr_type = stream_read_tree (ib, data_in);
4383 ii->outer_type = stream_read_tree (ib, data_in);
4384 }
4385 }
4386
4387 /* Stream out NODE info to OB. */
4388
4389 static void
4390 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4391 {
4392 int node_ref;
4393 lto_symtab_encoder_t encoder;
4394 struct ipa_node_params *info = IPA_NODE_REF (node);
4395 int j;
4396 struct cgraph_edge *e;
4397 struct bitpack_d bp;
4398
4399 encoder = ob->decl_state->symtab_node_encoder;
4400 node_ref = lto_symtab_encoder_encode (encoder, node);
4401 streamer_write_uhwi (ob, node_ref);
4402
4403 streamer_write_uhwi (ob, ipa_get_param_count (info));
4404 for (j = 0; j < ipa_get_param_count (info); j++)
4405 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4406 bp = bitpack_create (ob->main_stream);
4407 gcc_assert (info->uses_analysis_done
4408 || ipa_get_param_count (info) == 0);
4409 gcc_assert (!info->node_enqueued);
4410 gcc_assert (!info->ipcp_orig_node);
4411 for (j = 0; j < ipa_get_param_count (info); j++)
4412 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4413 streamer_write_bitpack (&bp);
4414 for (j = 0; j < ipa_get_param_count (info); j++)
4415 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4416 for (e = node->callees; e; e = e->next_callee)
4417 {
4418 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4419
4420 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4421 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4422 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4423 }
4424 for (e = node->indirect_calls; e; e = e->next_callee)
4425 {
4426 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4427
4428 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4429 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4430 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4431 ipa_write_indirect_edge_info (ob, e);
4432 }
4433 }
4434
4435 /* Stream in NODE info from IB. */
4436
4437 static void
4438 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4439 struct data_in *data_in)
4440 {
4441 struct ipa_node_params *info = IPA_NODE_REF (node);
4442 int k;
4443 struct cgraph_edge *e;
4444 struct bitpack_d bp;
4445
4446 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4447
4448 for (k = 0; k < ipa_get_param_count (info); k++)
4449 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4450
4451 bp = streamer_read_bitpack (ib);
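/* The writer only streams nodes whose parameter uses have been analyzed
(see the assertion in ipa_write_node_info), so reflect that here. */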
4452 if (ipa_get_param_count (info) != 0)
4453 info->uses_analysis_done = true;
4454 info->node_enqueued = false;
4455 for (k = 0; k < ipa_get_param_count (info); k++)
4456 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4457 for (k = 0; k < ipa_get_param_count (info); k++)
4458 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4459 for (e = node->callees; e; e = e->next_callee)
4460 {
4461 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4462 int count = streamer_read_uhwi (ib);
4463
4464 if (!count)
4465 continue;
4466 vec_safe_grow_cleared (args->jump_functions, count);
4467
4468 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4469 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4470 data_in);
4471 }
4472 for (e = node->indirect_calls; e; e = e->next_callee)
4473 {
4474 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4475 int count = streamer_read_uhwi (ib);
4476
4477 if (count)
4478 {
4479 vec_safe_grow_cleared (args->jump_functions, count);
4480 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4481 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4482 data_in);
4483 }
4484 ipa_read_indirect_edge_info (ib, data_in, e);
4485 }
4486 }
4487
4488 /* Write jump functions for all analyzed nodes in the current partition. */
4489
4490 void
4491 ipa_prop_write_jump_functions (void)
4492 {
4493 struct cgraph_node *node;
4494 struct output_block *ob;
4495 unsigned int count = 0;
4496 lto_symtab_encoder_iterator lsei;
4497 lto_symtab_encoder_t encoder;
4498 
4500 if (!ipa_node_params_vector.exists ())
4501 return;
4502
4503 ob = create_output_block (LTO_section_jump_functions);
4504 encoder = ob->decl_state->symtab_node_encoder;
4505 ob->cgraph_node = NULL;
4506 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4507 lsei_next_function_in_partition (&lsei))
4508 {
4509 node = lsei_cgraph_node (lsei);
4510 if (cgraph_function_with_gimple_body_p (node)
4511 && IPA_NODE_REF (node) != NULL)
4512 count++;
4513 }
4514
4515 streamer_write_uhwi (ob, count);
4516
4517 /* Process all of the functions. */
4518 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4519 lsei_next_function_in_partition (&lsei))
4520 {
4521 node = lsei_cgraph_node (lsei);
4522 if (cgraph_function_with_gimple_body_p (node)
4523 && IPA_NODE_REF (node) != NULL)
4524 ipa_write_node_info (ob, node);
4525 }
4526 streamer_write_char_stream (ob->main_stream, 0);
4527 produce_asm (ob, NULL);
4528 destroy_output_block (ob);
4529 }
4530
4531 /* Read a jump-functions section in file FILE_DATA of length LEN with data DATA. */
4532
4533 static void
4534 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4535 size_t len)
4536 {
4537 const struct lto_function_header *header =
4538 (const struct lto_function_header *) data;
4539 const int cfg_offset = sizeof (struct lto_function_header);
4540 const int main_offset = cfg_offset + header->cfg_size;
4541 const int string_offset = main_offset + header->main_size;
4542 struct data_in *data_in;
4543 struct lto_input_block ib_main;
4544 unsigned int i;
4545 unsigned int count;
4546
4547 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4548 header->main_size);
4549
4550 data_in =
4551 lto_data_in_create (file_data, (const char *) data + string_offset,
4552 header->string_size, vNULL);
4553 count = streamer_read_uhwi (&ib_main);
4554
4555 for (i = 0; i < count; i++)
4556 {
4557 unsigned int index;
4558 struct cgraph_node *node;
4559 lto_symtab_encoder_t encoder;
4560
4561 index = streamer_read_uhwi (&ib_main);
4562 encoder = file_data->symtab_node_encoder;
4563 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4564 gcc_assert (node->definition);
4565 ipa_read_node_info (&ib_main, node, data_in);
4566 }
4567 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4568 len);
4569 lto_data_in_delete (data_in);
4570 }
4571
4572 /* Read ipcp jump functions. */
4573
4574 void
4575 ipa_prop_read_jump_functions (void)
4576 {
4577 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4578 struct lto_file_decl_data *file_data;
4579 unsigned int j = 0;
4580
4581 ipa_check_create_node_params ();
4582 ipa_check_create_edge_args ();
4583 ipa_register_cgraph_hooks ();
4584
4585 while ((file_data = file_data_vec[j++]))
4586 {
4587 size_t len;
4588 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4589
4590 if (data)
4591 ipa_prop_read_section (file_data, data, len);
4592 }
4593 }
4594
4595 /* After merging units, we can get a mismatch in argument counts.
4596 Decl merging might also have rendered parameter lists obsolete.
4597 Also compute called_with_variable_arg info. */
4598
4599 void
4600 ipa_update_after_lto_read (void)
4601 {
4602 ipa_check_create_node_params ();
4603 ipa_check_create_edge_args ();
4604 }
4605
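/* Stream out the aggregate value replacement chain for NODE to OB. */
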
4606 void
4607 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4608 {
4609 int node_ref;
4610 unsigned int count = 0;
4611 lto_symtab_encoder_t encoder;
4612 struct ipa_agg_replacement_value *aggvals, *av;
4613
4614 aggvals = ipa_get_agg_replacements_for_node (node);
4615 encoder = ob->decl_state->symtab_node_encoder;
4616 node_ref = lto_symtab_encoder_encode (encoder, node);
4617 streamer_write_uhwi (ob, node_ref);
4618
4619 for (av = aggvals; av; av = av->next)
4620 count++;
4621 streamer_write_uhwi (ob, count);
4622
4623 for (av = aggvals; av; av = av->next)
4624 {
4625 struct bitpack_d bp;
4626
4627 streamer_write_uhwi (ob, av->offset);
4628 streamer_write_uhwi (ob, av->index);
4629 stream_write_tree (ob, av->value, true);
4630
4631 bp = bitpack_create (ob->main_stream);
4632 bp_pack_value (&bp, av->by_ref, 1);
4633 streamer_write_bitpack (&bp);
4634 }
4635 }
4636
4637 /* Stream in the aggregate value replacement chain for NODE from IB. */
4638
4639 static void
4640 read_agg_replacement_chain (struct lto_input_block *ib,
4641 struct cgraph_node *node,
4642 struct data_in *data_in)
4643 {
4644 struct ipa_agg_replacement_value *aggvals = NULL;
4645 unsigned int count, i;
4646
4647 count = streamer_read_uhwi (ib);
4648 for (i = 0; i < count; i++)
4649 {
4650 struct ipa_agg_replacement_value *av;
4651 struct bitpack_d bp;
4652
4653 av = ggc_alloc_ipa_agg_replacement_value ();
4654 av->offset = streamer_read_uhwi (ib);
4655 av->index = streamer_read_uhwi (ib);
4656 av->value = stream_read_tree (ib, data_in);
4657 bp = streamer_read_bitpack (ib);
4658 av->by_ref = bp_unpack_value (&bp, 1);
4659 av->next = aggvals;
4660 aggvals = av;
4661 }
4662 ipa_set_node_agg_value_chain (node, aggvals);
4663 }
4664
4665 /* Write all aggregate replacements for nodes in the current partition. */
4666
4667 void
4668 ipa_prop_write_all_agg_replacement (void)
4669 {
4670 struct cgraph_node *node;
4671 struct output_block *ob;
4672 unsigned int count = 0;
4673 lto_symtab_encoder_iterator lsei;
4674 lto_symtab_encoder_t encoder;
4675
4676 if (!ipa_node_agg_replacements)
4677 return;
4678
4679 ob = create_output_block (LTO_section_ipcp_transform);
4680 encoder = ob->decl_state->symtab_node_encoder;
4681 ob->cgraph_node = NULL;
4682 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4683 lsei_next_function_in_partition (&lsei))
4684 {
4685 node = lsei_cgraph_node (lsei);
4686 if (cgraph_function_with_gimple_body_p (node)
4687 && ipa_get_agg_replacements_for_node (node) != NULL)
4688 count++;
4689 }
4690
4691 streamer_write_uhwi (ob, count);
4692
4693 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4694 lsei_next_function_in_partition (&lsei))
4695 {
4696 node = lsei_cgraph_node (lsei);
4697 if (cgraph_function_with_gimple_body_p (node)
4698 && ipa_get_agg_replacements_for_node (node) != NULL)
4699 write_agg_replacement_chain (ob, node);
4700 }
4701 streamer_write_char_stream (ob->main_stream, 0);
4702 produce_asm (ob, NULL);
4703 destroy_output_block (ob);
4704 }
4705
4706 /* Read replacements section in file FILE_DATA of length LEN with data
4707 DATA. */
4708
4709 static void
4710 read_replacements_section (struct lto_file_decl_data *file_data,
4711 const char *data,
4712 size_t len)
4713 {
4714 const struct lto_function_header *header =
4715 (const struct lto_function_header *) data;
4716 const int cfg_offset = sizeof (struct lto_function_header);
4717 const int main_offset = cfg_offset + header->cfg_size;
4718 const int string_offset = main_offset + header->main_size;
4719 struct data_in *data_in;
4720 struct lto_input_block ib_main;
4721 unsigned int i;
4722 unsigned int count;
4723
4724 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4725 header->main_size);
4726
4727 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4728 header->string_size, vNULL);
4729 count = streamer_read_uhwi (&ib_main);
4730
4731 for (i = 0; i < count; i++)
4732 {
4733 unsigned int index;
4734 struct cgraph_node *node;
4735 lto_symtab_encoder_t encoder;
4736
4737 index = streamer_read_uhwi (&ib_main);
4738 encoder = file_data->symtab_node_encoder;
4739 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4740 gcc_assert (node->definition);
4741 read_agg_replacement_chain (&ib_main, node, data_in);
4742 }
4743 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4744 len);
4745 lto_data_in_delete (data_in);
4746 }
4747
4748 /* Read IPA-CP aggregate replacements. */
4749
4750 void
4751 ipa_prop_read_all_agg_replacement (void)
4752 {
4753 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4754 struct lto_file_decl_data *file_data;
4755 unsigned int j = 0;
4756
4757 while ((file_data = file_data_vec[j++]))
4758 {
4759 size_t len;
4760 const char *data = lto_get_section_data (file_data,
4761 LTO_section_ipcp_transform,
4762 NULL, &len);
4763 if (data)
4764 read_replacements_section (file_data, data, len);
4765 }
4766 }
4767
4768 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4769 NODE. */
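
/* A hypothetical illustration: with four original parameters and
combined_args_to_skip = {1}, the mapping computed below is
{0, -1, 1, 2}, so a value recorded for original parameter 2 is
renumbered to parameter 1 of the clone. */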
4770
4771 static void
4772 adjust_agg_replacement_values (struct cgraph_node *node,
4773 struct ipa_agg_replacement_value *aggval)
4774 {
4775 struct ipa_agg_replacement_value *v;
4776 int i, c = 0, d = 0, *adj;
4777
4778 if (!node->clone.combined_args_to_skip)
4779 return;
4780
4781 for (v = aggval; v; v = v->next)
4782 {
4783 gcc_assert (v->index >= 0);
4784 if (c < v->index)
4785 c = v->index;
4786 }
4787 c++;
4788
4789 adj = XALLOCAVEC (int, c);
4790 for (i = 0; i < c; i++)
4791 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4792 {
4793 adj[i] = -1;
4794 d++;
4795 }
4796 else
4797 adj[i] = i - d;
4798
4799 for (v = aggval; v; v = v->next)
4800 v->index = adj[v->index];
4801 }
4802
4803
4804 /* Function body transformation phase. */
4805
4806 unsigned int
4807 ipcp_transform_function (struct cgraph_node *node)
4808 {
4809 vec<ipa_param_descriptor> descriptors = vNULL;
4810 struct param_analysis_info *parms_ainfo;
4811 struct ipa_agg_replacement_value *aggval;
4812 gimple_stmt_iterator gsi;
4813 basic_block bb;
4814 int param_count;
4815 bool cfg_changed = false, something_changed = false;
4816
4817 gcc_checking_assert (cfun);
4818 gcc_checking_assert (current_function_decl);
4819
4820 if (dump_file)
4821 fprintf (dump_file, "Modification phase of node %s/%i\n",
4822 node->name (), node->order);
4823
4824 aggval = ipa_get_agg_replacements_for_node (node);
4825 if (!aggval)
4826 return 0;
4827 param_count = count_formal_params (node->decl);
4828 if (param_count == 0)
4829 return 0;
4830 adjust_agg_replacement_values (node, aggval);
4831 if (dump_file)
4832 ipa_dump_agg_replacement_values (dump_file, aggval);
4833 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4834 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4835 descriptors.safe_grow_cleared (param_count);
4836 ipa_populate_param_decls (node, descriptors);
4837
4838 FOR_EACH_BB_FN (bb, cfun)
4839 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4840 {
4841 struct ipa_agg_replacement_value *v;
4842 gimple stmt = gsi_stmt (gsi);
4843 tree rhs, val, t;
4844 HOST_WIDE_INT offset, size;
4845 int index;
4846 bool by_ref, vce;
4847
4848 if (!gimple_assign_load_p (stmt))
4849 continue;
4850 rhs = gimple_assign_rhs1 (stmt);
4851 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4852 continue;
4853
4854 vce = false;
4855 t = rhs;
4856 while (handled_component_p (t))
4857 {
4858 /* V_C_E can do things like convert an array of integers to one
4859 bigger integer and similar things we do not handle below. */
4860 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4861 {
4862 vce = true;
4863 break;
4864 }
4865 t = TREE_OPERAND (t, 0);
4866 }
4867 if (vce)
4868 continue;
4869
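/* Check whether a constant was recorded for exactly this load: the same
parameter, offset, size and by-reference-ness must all match. */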
4870 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4871 rhs, &index, &offset, &size, &by_ref))
4872 continue;
4873 for (v = aggval; v; v = v->next)
4874 if (v->index == index
4875 && v->offset == offset)
4876 break;
4877 if (!v
4878 || v->by_ref != by_ref
4879 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4880 continue;
4881
4882 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4883 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4884 {
4885 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4886 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4887 else if (TYPE_SIZE (TREE_TYPE (rhs))
4888 == TYPE_SIZE (TREE_TYPE (v->value)))
4889 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4890 else
4891 {
4892 if (dump_file)
4893 {
4894 fprintf (dump_file, " const ");
4895 print_generic_expr (dump_file, v->value, 0);
4896 fprintf (dump_file, " can't be converted to type of ");
4897 print_generic_expr (dump_file, rhs, 0);
4898 fprintf (dump_file, "\n");
4899 }
4900 continue;
4901 }
4902 }
4903 else
4904 val = v->value;
4905
4906 if (dump_file && (dump_flags & TDF_DETAILS))
4907 {
4908 fprintf (dump_file, "Modifying stmt:\n ");
4909 print_gimple_stmt (dump_file, stmt, 0, 0);
4910 }
4911 gimple_assign_set_rhs_from_tree (&gsi, val);
4912 update_stmt (stmt);
4913
4914 if (dump_file && (dump_flags & TDF_DETAILS))
4915 {
4916 fprintf (dump_file, "into:\n ");
4917 print_gimple_stmt (dump_file, stmt, 0, 0);
4918 fprintf (dump_file, "\n");
4919 }
4920
4921 something_changed = true;
4922 if (maybe_clean_eh_stmt (stmt)
4923 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4924 cfg_changed = true;
4925 }
4926
4927 (*ipa_node_agg_replacements)[node->uid] = NULL;
4928 free_parms_ainfo (parms_ainfo, param_count);
4929 descriptors.release ();
4930
4931 if (!something_changed)
4932 return 0;
4933 else if (cfg_changed)
4934 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4935 else
4936 return TODO_update_ssa_only_virtuals;
4937 }