/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the per-edge argument information is actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
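
/* For illustration (an assumed example, not part of this file): a function
   carrying its own optimization settings, e.g.

     void noopt_fn (void) __attribute__ ((optimize (0)));

   has a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node in which x_optimize is
   zero, so the predicate above returns true and IPA-CP leaves the function
   alone.  The same happens when IPA-CP alone is disabled for it via
   __attribute__ ((optimize ("no-ipa-cp"))).  */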

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to FILE.
   Note there is no setter function as the descriptor array is built just once
   by ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i",
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
              && TYPE_BINFO (component_type));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                        sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
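
/* A small illustrative sketch (assumed caller code, not from this file):
   for a call such as

     extern void callback (void);
     ...
     register_handler (&callback);

   the argument is an ADDR_EXPR of a FUNCTION_DECL, so the constant jump
   function above also gets a reference description with refcount 1.  The
   rdesc lets later IPA phases drop the recorded reference to "callback"
   when the call site is optimized away or the constant is propagated.  */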

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC, which must be a known
   type jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
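
/* To illustrate the assumptions documented above stmt_may_be_vtbl_ptr_store
   (a hedged C++ sketch, not part of this file):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); B (); };

     B::B ()
     {
       // 1) A's constructor runs first and stores A's vtable pointer,
       // 2) then B's vtable pointer is stored into this->vptr,
       // 3) only then does user-written constructor code run, and only
       //    that code may call virtual functions.
     }

   Walking statements backwards from a virtual call therefore only needs to
   recognize the vtable-pointer stores of stage 2; call statements seen on
   the way cannot be ancestor constructors or destructors.  */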

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL_TREE;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
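
/* An example of the situation detect_type_change looks for (an assumed C++
   sketch, not from this file):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     void foo (void)
     {
       A a;
       A *p = &a;
       a.~A ();
       new (&a) B;   // stores B's vtable pointer into the storage of a
       p->f ();      // dynamic type at this call is B
     }

   Walking the virtual definitions backwards from the call finds the
   vtable-pointer store made by the placement-new constructor, so the jump
   function is filled with the new type (or marked unknown when several
   candidate types are encountered).  */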

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Sets the boolean pointed to by DATA to
   true, flagging that an aliasing definition has been found.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
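
/* Example of what parm_preserved_before_stmt_p checks (an assumed C sketch,
   not from this file):

     void foo (int a)
     {
       use_addr (&a);   // a becomes addressable, loads of it go via memory
       bar (a);         // the load of a feeding this call is preserved
       a = 5;           // a clobbering store
       baz (a);         // a load feeding this call is NOT preserved
     }

   Walking aliased virtual definitions backwards from the statement of
   interest stops with "modified" as soon as a clobbering store such as the
   assignment above is found; the result is cached in param_analysis_info so
   repeated queries stay cheap.  */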

/* If STMT is an assignment that loads a value from a parameter declaration,
   and that parameter has not been modified before STMT, return the index of
   the parameter in ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable; the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

      B::foo() (struct B * const this)
      {
        struct A * D.1845;

        D.1845_2 = &this_1(D)->D.1748;
        A::bar (D.1845_2);

   INFO is the structure describing individual parameters that is used across
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
                                         call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                             parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm), type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}
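
/* Typical input matched by compute_known_type_jump_func (an assumed C++
   sketch, not from this file):

     struct A { virtual void f (); };

     void caller (void)
     {
       A a;           // automatic object, type changes are detectable
       consume (&a);  // argument is &a, an ADDR_EXPR of a local DECL
     }

   Here the argument's dynamic type is known to be A (unless a type change
   is detected between the object's construction and the call), so a
   KNOWN_TYPE jump function with offset 0, base type A and component type A
   is produced.  */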

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
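
/* The layout type_like_member_ptr_p looks for corresponds to the common
   (Itanium C++ ABI style) representation of a pointer to member function,
   roughly (an assumed sketch, field names hypothetical):

     struct method_ptr_repr
     {
       void (*__pfn) (struct S *);  // pointer whose pointed-to type is a
                                    // METHOD_TYPE in the compiler's trees
       ptrdiff_t __delta;           // integral this-pointer adjustment
     };

   i.e. exactly two fields, the first a pointer to a method and the second
   an integer.  */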

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
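
/* For example (an assumed GIMPLE snippet), given

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the two single-rhs copies and
   returns a_1(D), while for a default definition or a name defined by a
   non-copy statement it stops and returns the name it has reached.  */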

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}
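
/* The pattern determine_known_aggregate_parts is after looks like this
   (an assumed C sketch, not from this file):

     struct S { int a, b; };

     void caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);   // by_ref aggregate with known contents
     }

   Walking backwards from the call records the stores to s.a and s.b as a
   sorted offset/constant list and turns them into ipa_agg_jf_item entries
   { offset 0, value 1 } and { offset 32, value 2 } (bit offsets, on a
   target with 32-bit int), capped by PARAM_IPA_MAX_AGG_ITEMS.  */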

/* Return the type of the Ith formal parameter of the function called by edge
   E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
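
/* For instance (an assumed case): for a callee only declared as

     void f ();

   in C, TYPE_ARG_TYPES of its type is empty, so the loop above falls back
   to walking DECL_ARGUMENTS of the callee's decl, which is available
   whenever the body is.  For indirect calls with no known callee, only the
   function type recorded on the call statement can be consulted.  */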

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for loop.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p, type_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  if (param_type && POINTER_TYPE_P (param_type))
                    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
                                                      call, jfunc);
                  else
                    type_p = false;
                  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
                    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
                                                    type_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt, param_type);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call,
                                      param_type
                                      && POINTER_TYPE_P (param_type)
                                      ? TREE_TYPE (param_type)
                                      : NULL);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (TREE_TYPE (arg))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->definition && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  return (TREE_CODE (t) == SSA_NAME
          && !SSA_NAME_IS_DEFAULT_DEF (t));
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
1761
1762 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1763 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1764 intermediate information about each formal parameter. Currently it checks
1765 whether the call calls a pointer that is a formal parameter and if so, the
1766 parameter is marked with the called flag and an indirect call graph edge
1767 describing the call is created. This is very simple for ordinary pointers
1768 represented in SSA but not-so-nice when it comes to member pointers. The
1769 ugly part of this function does nothing more than trying to match the
1770 pattern of such a call. An example of such a pattern is the gimple dump
1771 below, the call is on the last line:
1772
1773 <bb 2>:
1774 f$__delta_5 = f.__delta;
1775 f$__pfn_24 = f.__pfn;
1776
1777 or
1778 <bb 2>:
1779 f$__delta_5 = MEM[(struct *)&f];
1780 f$__pfn_24 = MEM[(struct *)&f + 4B];
1781
1782 and a few lines below:
1783
1784 <bb 5>
1785 D.2496_3 = (int) f$__pfn_24;
1786 D.2497_4 = D.2496_3 & 1;
1787 if (D.2497_4 != 0)
1788 goto <bb 3>;
1789 else
1790 goto <bb 4>;
1791
1792 <bb 6>:
1793 D.2500_7 = (unsigned int) f$__delta_5;
1794 D.2501_8 = &S + D.2500_7;
1795 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1796 D.2503_10 = *D.2502_9;
1797 D.2504_12 = f$__pfn_24 + -1;
1798 D.2505_13 = (unsigned int) D.2504_12;
1799 D.2506_14 = D.2503_10 + D.2505_13;
1800 D.2507_15 = *D.2506_14;
1801 iftmp.11_16 = (MyString:: *) D.2507_15;
1802
1803 <bb 7>:
1804 # iftmp.11_1 = PHI <iftmp.11_16(6), f$__pfn_24(5)>
1805 D.2500_19 = (unsigned int) f$__delta_5;
1806 D.2508_20 = &S + D.2500_19;
1807 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1808
1809 Such patterns are the result of simple calls through a member pointer:
1810
1811 int doprinting (int (MyString::* f)(int) const)
1812 {
1813 MyString S ("somestring");
1814
1815 return (S.*f)(4);
1816 }
1817
1818 The function also looks for called pointers loaded from aggregates
1819 passed by value or reference. */
1820
1821 static void
1822 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1823 struct ipa_node_params *info,
1824 struct param_analysis_info *parms_ainfo,
1825 gimple call, tree target)
1826 {
1827 gimple def;
1828 tree n1, n2;
1829 gimple d1, d2;
1830 tree rec, rec2, cond;
1831 gimple branch;
1832 int index;
1833 basic_block bb, virt_bb, join;
1834 HOST_WIDE_INT offset;
1835 bool by_ref;
1836
1837 if (SSA_NAME_IS_DEFAULT_DEF (target))
1838 {
1839 tree var = SSA_NAME_VAR (target);
1840 index = ipa_get_param_decl_index (info, var);
1841 if (index >= 0)
1842 ipa_note_param_call (node, index, call);
1843 return;
1844 }
1845
1846 def = SSA_NAME_DEF_STMT (target);
1847 if (gimple_assign_single_p (def)
1848 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1849 gimple_assign_rhs1 (def), &index, &offset,
1850 NULL, &by_ref))
1851 {
1852 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1853 if (cs->indirect_info->offset != offset)
1854 cs->indirect_info->outer_type = NULL;
1855 cs->indirect_info->offset = offset;
1856 cs->indirect_info->agg_contents = 1;
1857 cs->indirect_info->by_ref = by_ref;
1858 return;
1859 }
1860
1861 /* Now we need to try to match the complex pattern of calling a member
1862 pointer. */
1863 if (gimple_code (def) != GIMPLE_PHI
1864 || gimple_phi_num_args (def) != 2
1865 || !POINTER_TYPE_P (TREE_TYPE (target))
1866 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1867 return;
1868
1869 /* First, we need to check whether one of these is a load from a member
1870 pointer that is a parameter to this function. */
1871 n1 = PHI_ARG_DEF (def, 0);
1872 n2 = PHI_ARG_DEF (def, 1);
1873 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1874 return;
1875 d1 = SSA_NAME_DEF_STMT (n1);
1876 d2 = SSA_NAME_DEF_STMT (n2);
1877
1878 join = gimple_bb (def);
1879 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1880 {
1881 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1882 return;
1883
1884 bb = EDGE_PRED (join, 0)->src;
1885 virt_bb = gimple_bb (d2);
1886 }
1887 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1888 {
1889 bb = EDGE_PRED (join, 1)->src;
1890 virt_bb = gimple_bb (d1);
1891 }
1892 else
1893 return;
1894
1895 /* Second, we need to check that the basic blocks are laid out in the way
1896 corresponding to the pattern. */
1897
1898 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1899 || single_pred (virt_bb) != bb
1900 || single_succ (virt_bb) != join)
1901 return;
1902
1903 /* Third, check that the branching depends on the least
1904 significant bit of the pfn. */
1905
1906 branch = last_stmt (bb);
1907 if (!branch || gimple_code (branch) != GIMPLE_COND)
1908 return;
1909
1910 if ((gimple_cond_code (branch) != NE_EXPR
1911 && gimple_cond_code (branch) != EQ_EXPR)
1912 || !integer_zerop (gimple_cond_rhs (branch)))
1913 return;
1914
1915 cond = gimple_cond_lhs (branch);
1916 if (!ipa_is_ssa_with_stmt_def (cond))
1917 return;
1918
1919 def = SSA_NAME_DEF_STMT (cond);
1920 if (!is_gimple_assign (def)
1921 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1922 || !integer_onep (gimple_assign_rhs2 (def)))
1923 return;
1924
1925 cond = gimple_assign_rhs1 (def);
1926 if (!ipa_is_ssa_with_stmt_def (cond))
1927 return;
1928
1929 def = SSA_NAME_DEF_STMT (cond);
1930
1931 if (is_gimple_assign (def)
1932 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1933 {
1934 cond = gimple_assign_rhs1 (def);
1935 if (!ipa_is_ssa_with_stmt_def (cond))
1936 return;
1937 def = SSA_NAME_DEF_STMT (cond);
1938 }
1939
1940 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1941 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1942 == ptrmemfunc_vbit_in_delta),
1943 NULL);
1944 if (rec != rec2)
1945 return;
1946
1947 index = ipa_get_param_decl_index (info, rec);
1948 if (index >= 0
1949 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1950 {
1951 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1952 if (cs->indirect_info->offset != offset)
1953 cs->indirect_info->outer_type = NULL;
1954 cs->indirect_info->offset = offset;
1955 cs->indirect_info->agg_contents = 1;
1956 cs->indirect_info->member_ptr = 1;
1957 }
1958
1959 return;
1960 }
1961
1962 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1963 object referenced in the expression is a formal parameter of the caller
1964 (described by INFO), create a call note for the statement. */
1965
1966 static void
1967 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1968 struct ipa_node_params *info, gimple call,
1969 tree target)
1970 {
1971 struct cgraph_edge *cs;
1972 struct cgraph_indirect_call_info *ii;
1973 struct ipa_jump_func jfunc;
1974 tree obj = OBJ_TYPE_REF_OBJECT (target);
1975 int index;
1976 HOST_WIDE_INT anc_offset;
1977
1978 if (!flag_devirtualize)
1979 return;
1980
1981 if (TREE_CODE (obj) != SSA_NAME)
1982 return;
1983
1984 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1985 {
1986 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1987 return;
1988
1989 anc_offset = 0;
1990 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1991 gcc_assert (index >= 0);
1992 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1993 call, &jfunc))
1994 return;
1995 }
1996 else
1997 {
1998 gimple stmt = SSA_NAME_DEF_STMT (obj);
1999 tree expr;
2000
2001 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2002 if (!expr)
2003 return;
2004 index = ipa_get_param_decl_index (info,
2005 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2006 gcc_assert (index >= 0);
2007 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2008 call, &jfunc, anc_offset))
2009 return;
2010 }
2011
2012 cs = ipa_note_param_call (node, index, call);
2013 ii = cs->indirect_info;
2014 ii->offset = anc_offset;
2015 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2016 ii->otr_type = obj_type_ref_class (target);
2017 ii->polymorphic = 1;
2018 }
2019
2020 /* Analyze call statement CALL to determine whether and how it utilizes formal
2021 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a
2022 vector containing intermediate information about each formal parameter. */
2023
2024 static void
2025 ipa_analyze_call_uses (struct cgraph_node *node,
2026 struct ipa_node_params *info,
2027 struct param_analysis_info *parms_ainfo, gimple call)
2028 {
2029 tree target = gimple_call_fn (call);
2030 struct cgraph_edge *cs;
2031
2032 if (!target
2033 || (TREE_CODE (target) != SSA_NAME
2034 && !virtual_method_call_p (target)))
2035 return;
2036
2037 /* If we previously turned the call into a direct call, there is
2038 no need to analyze it again. */
2039 cs = cgraph_edge (node, call);
2040 if (cs && !cs->indirect_unknown_callee)
2041 return;
2042 if (TREE_CODE (target) == SSA_NAME)
2043 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2044 else if (virtual_method_call_p (target))
2045 ipa_analyze_virtual_call_uses (node, info, call, target);
2046 }
2047
2048
2049 /* Analyze the call statement STMT with respect to formal parameters (described
2050 in INFO) of the caller given by NODE. Currently it only checks whether formal
2051 parameters are called. PARMS_AINFO is a pointer to a vector containing
2052 intermediate information about each formal parameter. */
2053
2054 static void
2055 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2056 struct param_analysis_info *parms_ainfo, gimple stmt)
2057 {
2058 if (is_gimple_call (stmt))
2059 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2060 }
2061
2062 /* Callback of walk_stmt_load_store_addr_ops, used below for loads, stores and
2063 address-taking operands alike. If OP is a parameter declaration, mark it as
2064 used in the info structure passed in DATA. */
2065
2066 static bool
2067 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2068 {
2069 struct ipa_node_params *info = (struct ipa_node_params *) data;
2070
2071 op = get_base_address (op);
2072 if (op
2073 && TREE_CODE (op) == PARM_DECL)
2074 {
2075 int index = ipa_get_param_decl_index (info, op);
2076 gcc_assert (index >= 0);
2077 ipa_set_param_used (info, index, true);
2078 }
2079
2080 return false;
2081 }
2082
2083 /* Scan the function body of NODE and inspect the uses of formal parameters.
2084 Store the findings in various fields of the associated ipa_node_params
2085 structure, such as parameter flags and notes. PARMS_AINFO is a pointer to a
2086 vector containing intermediate information about each formal parameter. */
2087
2088 static void
2089 ipa_analyze_params_uses (struct cgraph_node *node,
2090 struct param_analysis_info *parms_ainfo)
2091 {
2092 tree decl = node->decl;
2093 basic_block bb;
2094 struct function *func;
2095 gimple_stmt_iterator gsi;
2096 struct ipa_node_params *info = IPA_NODE_REF (node);
2097 int i;
2098
2099 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2100 return;
2101
2102 info->uses_analysis_done = 1;
2103 if (ipa_func_spec_opts_forbid_analysis_p (node))
2104 {
2105 for (i = 0; i < ipa_get_param_count (info); i++)
2106 {
2107 ipa_set_param_used (info, i, true);
2108 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2109 }
2110 return;
2111 }
2112
2113 for (i = 0; i < ipa_get_param_count (info); i++)
2114 {
2115 tree parm = ipa_get_param (info, i);
2116 int controlled_uses = 0;
2117
2118 /* For SSA regs, see if the parameter is used. For non-SSA parameters we
2119 compute the flag during modification analysis. */
2120 if (is_gimple_reg (parm))
2121 {
2122 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2123 parm);
2124 if (ddef && !has_zero_uses (ddef))
2125 {
2126 imm_use_iterator imm_iter;
2127 use_operand_p use_p;
2128
2129 ipa_set_param_used (info, i, true);
2130 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2131 if (!is_gimple_call (USE_STMT (use_p)))
2132 {
2133 if (!is_gimple_debug (USE_STMT (use_p)))
2134 {
2135 controlled_uses = IPA_UNDESCRIBED_USE;
2136 break;
2137 }
2138 }
2139 else
2140 controlled_uses++;
2141 }
2142 else
2143 controlled_uses = 0;
2144 }
2145 else
2146 controlled_uses = IPA_UNDESCRIBED_USE;
2147 ipa_set_controlled_uses (info, i, controlled_uses);
2148 }
2149
2150 func = DECL_STRUCT_FUNCTION (decl);
2151 FOR_EACH_BB_FN (bb, func)
2152 {
2153 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2154 {
2155 gimple stmt = gsi_stmt (gsi);
2156
2157 if (is_gimple_debug (stmt))
2158 continue;
2159
2160 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2161 walk_stmt_load_store_addr_ops (stmt, info,
2162 visit_ref_for_mod_analysis,
2163 visit_ref_for_mod_analysis,
2164 visit_ref_for_mod_analysis);
2165 }
2166 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2167 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2168 visit_ref_for_mod_analysis,
2169 visit_ref_for_mod_analysis,
2170 visit_ref_for_mod_analysis);
2171 }
2172 }
2173
2174 /* Free the bitmaps in PARMS_AINFO; it describes PARAM_COUNT parameters. */
2175
2176 static void
2177 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2178 {
2179 int i;
2180
2181 for (i = 0; i < param_count; i++)
2182 {
2183 if (parms_ainfo[i].parm_visited_statements)
2184 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2185 if (parms_ainfo[i].pt_visited_statements)
2186 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2187 }
2188 }
2189
2190 /* Initialize the array describing properties of formal parameters
2191 of NODE, analyze their uses and compute jump functions associated
2192 with actual arguments of calls from within NODE. */
2193
2194 void
2195 ipa_analyze_node (struct cgraph_node *node)
2196 {
2197 struct ipa_node_params *info;
2198 struct param_analysis_info *parms_ainfo;
2199 int param_count;
2200
2201 ipa_check_create_node_params ();
2202 ipa_check_create_edge_args ();
2203 info = IPA_NODE_REF (node);
2204 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2205 ipa_initialize_node_params (node);
2206
2207 param_count = ipa_get_param_count (info);
2208 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2209 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2210
2211 ipa_analyze_params_uses (node, parms_ainfo);
2212 ipa_compute_jump_functions (node, parms_ainfo);
2213
2214 free_parms_ainfo (parms_ainfo, param_count);
2215 pop_cfun ();
2216 }
2217
2218 /* Given a statement CALL, which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2219 attempt a type-based devirtualization. If successful, return the
2220 target function declaration, otherwise return NULL. */
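
/* A hypothetical sketch of the situation handled here: for C++ code like

     A a;
     a.foo ();

   the call may be emitted as something like

     OBJ_TYPE_REF (_3; &a -> 0) (&a);

   If the known-type jump function proves that &a points to an object whose
   dynamic type is A, the OBJ_TYPE_REF token (0 here) selects A::foo from the
   BINFO of A. The dump syntax above is illustrative only.  */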
2221
2222 tree
2223 ipa_intraprocedural_devirtualization (gimple call)
2224 {
2225 tree binfo, token, fndecl;
2226 struct ipa_jump_func jfunc;
2227 tree otr = gimple_call_fn (call);
2228
2229 jfunc.type = IPA_JF_UNKNOWN;
2230 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2231 call, obj_type_ref_class (otr));
2232 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2233 return NULL_TREE;
2234 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2235 if (!binfo)
2236 return NULL_TREE;
2237 token = OBJ_TYPE_REF_TOKEN (otr);
2238 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2239 binfo);
2240 #ifdef ENABLE_CHECKING
2241 if (fndecl)
2242 gcc_assert (possible_polymorphic_call_target_p
2243 (otr, cgraph_get_node (fndecl)));
2244 #endif
2245 return fndecl;
2246 }
2247
2248 /* Update the jump function DST when the call graph edge corresponding to SRC
2249 is being inlined, knowing that DST is of type ancestor and SRC of known
2250 type. */
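
/* Illustrative numbers: if SRC records a known object at offset 32 bits
   within the actual argument, and DST describes an ancestor located at
   offset 64 bits within its operand, the combined jump function records a
   known object at offset 32 + 64 == 96 bits, with the base type taken from
   SRC and the type taken from DST.  */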
2251
2252 static void
2253 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2254 struct ipa_jump_func *dst)
2255 {
2256 HOST_WIDE_INT combined_offset;
2257 tree combined_type;
2258
2259 if (!ipa_get_jf_ancestor_type_preserved (dst))
2260 {
2261 dst->type = IPA_JF_UNKNOWN;
2262 return;
2263 }
2264
2265 combined_offset = ipa_get_jf_known_type_offset (src)
2266 + ipa_get_jf_ancestor_offset (dst);
2267 combined_type = ipa_get_jf_ancestor_type (dst);
2268
2269 ipa_set_jf_known_type (dst, combined_offset,
2270 ipa_get_jf_known_type_base_type (src),
2271 combined_type);
2272 }
2273
2274 /* Update the jump functions associated with call graph edge E when the call
2275 graph edge CS is being inlined, assuming that E->caller is already (possibly
2276 indirectly) inlined into CS->callee and that E has not been inlined. */
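
/* A sketch of the pass-through composition done below: if E passes along
   formal parameter 2 of CS->callee unchanged (a simple pass-through), and CS
   in turn passes its caller's formal parameter 5 in that position (another
   simple pass-through), then after inlining E effectively receives formal 5
   of the new root, so its jump function is rewritten to a pass-through with
   formal_id 5. Combinations that cannot be described degrade to
   IPA_JF_UNKNOWN.  */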
2277
2278 static void
2279 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2280 struct cgraph_edge *e)
2281 {
2282 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2283 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2284 int count = ipa_get_cs_argument_count (args);
2285 int i;
2286
2287 for (i = 0; i < count; i++)
2288 {
2289 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2290
2291 if (dst->type == IPA_JF_ANCESTOR)
2292 {
2293 struct ipa_jump_func *src;
2294 int dst_fid = dst->value.ancestor.formal_id;
2295
2296 /* A variable number of arguments can cause havoc if we try to access
2297 one that does not exist in the inlined edge. So make sure we
2298 don't. */
2299 if (dst_fid >= ipa_get_cs_argument_count (top))
2300 {
2301 dst->type = IPA_JF_UNKNOWN;
2302 continue;
2303 }
2304
2305 src = ipa_get_ith_jump_func (top, dst_fid);
2306
2307 if (src->agg.items
2308 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2309 {
2310 struct ipa_agg_jf_item *item;
2311 int j;
2312
2313 /* Currently we do not produce clobber aggregate jump functions;
2314 replace with merging when we do. */
2315 gcc_assert (!dst->agg.items);
2316
2317 dst->agg.items = vec_safe_copy (src->agg.items);
2318 dst->agg.by_ref = src->agg.by_ref;
2319 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2320 item->offset -= dst->value.ancestor.offset;
2321 }
2322
2323 if (src->type == IPA_JF_KNOWN_TYPE)
2324 combine_known_type_and_ancestor_jfs (src, dst);
2325 else if (src->type == IPA_JF_PASS_THROUGH
2326 && src->value.pass_through.operation == NOP_EXPR)
2327 {
2328 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2329 dst->value.ancestor.agg_preserved &=
2330 src->value.pass_through.agg_preserved;
2331 dst->value.ancestor.type_preserved &=
2332 src->value.pass_through.type_preserved;
2333 }
2334 else if (src->type == IPA_JF_ANCESTOR)
2335 {
2336 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2337 dst->value.ancestor.offset += src->value.ancestor.offset;
2338 dst->value.ancestor.agg_preserved &=
2339 src->value.ancestor.agg_preserved;
2340 dst->value.ancestor.type_preserved &=
2341 src->value.ancestor.type_preserved;
2342 }
2343 else
2344 dst->type = IPA_JF_UNKNOWN;
2345 }
2346 else if (dst->type == IPA_JF_PASS_THROUGH)
2347 {
2348 struct ipa_jump_func *src;
2349 /* We must check the range due to calls with a variable number of arguments
2350 and we cannot combine jump functions with operations. */
2351 if (dst->value.pass_through.operation == NOP_EXPR
2352 && (dst->value.pass_through.formal_id
2353 < ipa_get_cs_argument_count (top)))
2354 {
2355 int dst_fid = dst->value.pass_through.formal_id;
2356 src = ipa_get_ith_jump_func (top, dst_fid);
2357 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2358
2359 switch (src->type)
2360 {
2361 case IPA_JF_UNKNOWN:
2362 dst->type = IPA_JF_UNKNOWN;
2363 break;
2364 case IPA_JF_KNOWN_TYPE:
2365 if (ipa_get_jf_pass_through_type_preserved (dst))
2366 ipa_set_jf_known_type (dst,
2367 ipa_get_jf_known_type_offset (src),
2368 ipa_get_jf_known_type_base_type (src),
2369 ipa_get_jf_known_type_base_type (src));
2370 else
2371 dst->type = IPA_JF_UNKNOWN;
2372 break;
2373 case IPA_JF_CONST:
2374 ipa_set_jf_cst_copy (dst, src);
2375 break;
2376
2377 case IPA_JF_PASS_THROUGH:
2378 {
2379 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2380 enum tree_code operation;
2381 operation = ipa_get_jf_pass_through_operation (src);
2382
2383 if (operation == NOP_EXPR)
2384 {
2385 bool agg_p, type_p;
2386 agg_p = dst_agg_p
2387 && ipa_get_jf_pass_through_agg_preserved (src);
2388 type_p = ipa_get_jf_pass_through_type_preserved (src)
2389 && ipa_get_jf_pass_through_type_preserved (dst);
2390 ipa_set_jf_simple_pass_through (dst, formal_id,
2391 agg_p, type_p);
2392 }
2393 else
2394 {
2395 tree operand = ipa_get_jf_pass_through_operand (src);
2396 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2397 operation);
2398 }
2399 break;
2400 }
2401 case IPA_JF_ANCESTOR:
2402 {
2403 bool agg_p, type_p;
2404 agg_p = dst_agg_p
2405 && ipa_get_jf_ancestor_agg_preserved (src);
2406 type_p = ipa_get_jf_ancestor_type_preserved (src)
2407 && ipa_get_jf_pass_through_type_preserved (dst);
2408 ipa_set_ancestor_jf (dst,
2409 ipa_get_jf_ancestor_offset (src),
2410 ipa_get_jf_ancestor_type (src),
2411 ipa_get_jf_ancestor_formal_id (src),
2412 agg_p, type_p);
2413 break;
2414 }
2415 default:
2416 gcc_unreachable ();
2417 }
2418
2419 if (src->agg.items
2420 && (dst_agg_p || !src->agg.by_ref))
2421 {
2422 /* Currently we do not produce clobber aggregate jump
2423 functions; replace with merging when we do. */
2424 gcc_assert (!dst->agg.items);
2425
2426 dst->agg.by_ref = src->agg.by_ref;
2427 dst->agg.items = vec_safe_copy (src->agg.items);
2428 }
2429 }
2430 else
2431 dst->type = IPA_JF_UNKNOWN;
2432 }
2433 }
2434 }
2435
2436 /* If TARGET is an ADDR_EXPR of a function declaration, make it the destination
2437 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2438
2439 struct cgraph_edge *
2440 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2441 {
2442 struct cgraph_node *callee;
2443 struct inline_edge_summary *es = inline_edge_summary (ie);
2444 bool unreachable = false;
2445
2446 if (TREE_CODE (target) == ADDR_EXPR)
2447 target = TREE_OPERAND (target, 0);
2448 if (TREE_CODE (target) != FUNCTION_DECL)
2449 {
2450 target = canonicalize_constructor_val (target, NULL);
2451 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2452 {
2453 if (ie->indirect_info->member_ptr)
2454 /* Member pointer call that goes through a VMT lookup. */
2455 return NULL;
2456
2457 if (dump_file)
2458 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2459 " in %s/%i, making it unreachable.\n",
2460 ie->caller->name (), ie->caller->order);
2461 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2462 callee = cgraph_get_create_node (target);
2463 unreachable = true;
2464 }
2465 else
2466 callee = cgraph_get_node (target);
2467 }
2468 else
2469 callee = cgraph_get_node (target);
2470
2471 /* Because may-edges are not explicitly represented and the vtable may be external,
2472 we may create the first reference to the object in the unit. */
2473 if (!callee || callee->global.inlined_to)
2474 {
2475
2476 /* We had better make sure we can refer to it.
2477 In the case of static functions we are out of luck, since we already
2478 removed their bodies. In the case of public functions we may or may
2479 not introduce the reference. */
2480 if (!canonicalize_constructor_val (target, NULL)
2481 || !TREE_PUBLIC (target))
2482 {
2483 if (dump_file)
2484 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2485 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2486 xstrdup (ie->caller->name ()),
2487 ie->caller->order,
2488 xstrdup (ie->callee->name ()),
2489 ie->callee->order);
2490 return NULL;
2491 }
2492 callee = cgraph_get_create_node (target);
2493 }
2494 ipa_check_create_node_params ();
2495
2496 /* We cannot make edges to inline clones. It is a bug if someone removed
2497 the cgraph node too early. */
2498 gcc_assert (!callee->global.inlined_to);
2499
2500 if (dump_file && !unreachable)
2501 {
2502 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2503 "(%s/%i -> %s/%i), for stmt ",
2504 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2505 xstrdup (ie->caller->name ()),
2506 ie->caller->order,
2507 xstrdup (callee->name ()),
2508 callee->order);
2509 if (ie->call_stmt)
2510 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2511 else
2512 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2513 }
2514 ie = cgraph_make_edge_direct (ie, callee);
2515 es = inline_edge_summary (ie);
2516 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2517 - eni_size_weights.call_cost);
2518 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2519 - eni_time_weights.call_cost);
2520
2521 return ie;
2522 }
2523
2524 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2525 return NULL if there is none. BY_REF specifies whether the value has to
2526 be passed by reference or by value. */
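
/* For example, with illustrative values: if AGG records the items
   { offset 0: &foo, offset 64: 42 } (offsets in bits) and AGG->by_ref is
   false, then a lookup with OFFSET == 64 and BY_REF == false yields 42,
   while any lookup with BY_REF == true yields NULL because of the by_ref
   mismatch.  */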
2527
2528 tree
2529 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2530 HOST_WIDE_INT offset, bool by_ref)
2531 {
2532 struct ipa_agg_jf_item *item;
2533 int i;
2534
2535 if (by_ref != agg->by_ref)
2536 return NULL;
2537
2538 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2539 if (item->offset == offset)
2540 {
2541 /* Currently we do not have clobber values; return NULL for them once
2542 we do. */
2543 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2544 return item->value;
2545 }
2546 return NULL;
2547 }
2548
2549 /* Remove a reference to SYMBOL from the list of references of a node given by
2550 reference description RDESC. Return true if the reference has been
2551 successfully found and removed. */
2552
2553 static bool
2554 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2555 {
2556 struct ipa_ref *to_del;
2557 struct cgraph_edge *origin;
2558
2559 origin = rdesc->cs;
2560 if (!origin)
2561 return false;
2562 to_del = ipa_find_reference (origin->caller, symbol,
2563 origin->call_stmt, origin->lto_stmt_uid);
2564 if (!to_del)
2565 return false;
2566
2567 ipa_remove_reference (to_del);
2568 if (dump_file)
2569 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2570 xstrdup (origin->caller->name ()),
2571 origin->caller->order, xstrdup (symbol->name ()));
2572 return true;
2573 }
2574
2575 /* If JFUNC has a reference description with refcount different from
2576 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2577 NULL. JFUNC must be a constant jump function. */
2578
2579 static struct ipa_cst_ref_desc *
2580 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2581 {
2582 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2583 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2584 return rdesc;
2585 else
2586 return NULL;
2587 }
2588
2589 /* If the value of constant jump function JFUNC is an address of a function
2590 declaration, return the associated call graph node. Otherwise return
2591 NULL. */
2592
2593 static cgraph_node *
2594 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2595 {
2596 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2597 tree cst = ipa_get_jf_constant (jfunc);
2598 if (TREE_CODE (cst) != ADDR_EXPR
2599 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2600 return NULL;
2601
2602 return cgraph_get_node (TREE_OPERAND (cst, 0));
2603 }
2604
2605
2606 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2607 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2608 the edge specified in the rdesc. Return false if either the symbol or the
2609 reference could not be found, otherwise return true. */
2610
2611 static bool
2612 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2613 {
2614 struct ipa_cst_ref_desc *rdesc;
2615 if (jfunc->type == IPA_JF_CONST
2616 && (rdesc = jfunc_rdesc_usable (jfunc))
2617 && --rdesc->refcount == 0)
2618 {
2619 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2620 if (!symbol)
2621 return false;
2622
2623 return remove_described_reference (symbol, rdesc);
2624 }
2625 return true;
2626 }
2627
2628 /* Try to find a destination for indirect edge IE that corresponds to a simple
2629 call or a call of a member function pointer and where the destination is a
2630 pointer formal parameter described by jump function JFUNC. If it can be
2631 determined, return the newly direct edge, otherwise return NULL.
2632 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2633
2634 static struct cgraph_edge *
2635 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2636 struct ipa_jump_func *jfunc,
2637 struct ipa_node_params *new_root_info)
2638 {
2639 struct cgraph_edge *cs;
2640 tree target;
2641 bool agg_contents = ie->indirect_info->agg_contents;
2642
2643 if (ie->indirect_info->agg_contents)
2644 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2645 ie->indirect_info->offset,
2646 ie->indirect_info->by_ref);
2647 else
2648 target = ipa_value_from_jfunc (new_root_info, jfunc);
2649 if (!target)
2650 return NULL;
2651 cs = ipa_make_edge_direct_to_target (ie, target);
2652
2653 if (cs && !agg_contents)
2654 {
2655 bool ok;
2656 gcc_checking_assert (cs->callee
2657 && (cs != ie
2658 || jfunc->type != IPA_JF_CONST
2659 || !cgraph_node_for_jfunc (jfunc)
2660 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2661 ok = try_decrement_rdesc_refcount (jfunc);
2662 gcc_checking_assert (ok);
2663 }
2664
2665 return cs;
2666 }
2667
2668 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2669 call based on a formal parameter which is described by jump function JFUNC
2670 and if it can be determined, make it direct and return the direct edge.
2671 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2672 are relative to. */
2673
2674 static struct cgraph_edge *
2675 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2676 struct ipa_jump_func *jfunc,
2677 struct ipa_node_params *new_root_info)
2678 {
2679 tree binfo, target;
2680
2681 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2682
2683 if (!binfo)
2684 return NULL;
2685
2686 if (TREE_CODE (binfo) != TREE_BINFO)
2687 {
2688 binfo = gimple_extract_devirt_binfo_from_cst
2689 (binfo, ie->indirect_info->otr_type);
2690 if (!binfo)
2691 return NULL;
2692 }
2693
2694 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2695 ie->indirect_info->otr_type);
2696 if (binfo)
2697 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2698 binfo);
2699 else
2700 return NULL;
2701
2702 if (target)
2703 {
2704 #ifdef ENABLE_CHECKING
2705 gcc_assert (possible_polymorphic_call_target_p
2706 (ie, cgraph_get_node (target)));
2707 #endif
2708 return ipa_make_edge_direct_to_target (ie, target);
2709 }
2710 else
2711 return NULL;
2712 }
2713
2714 /* Update the param called notes associated with NODE when CS is being inlined,
2715 assuming NODE is (potentially indirectly) inlined into CS->callee.
2716 Moreover, if the callee is discovered to be constant, create a new cgraph
2717 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2718 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
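
/* An example of the pass-through rewiring done below: if the inlined callee
   NODE contains an indirect call through its formal parameter 1, and the
   jump function of CS for argument 1 is a simple pass-through of the
   caller's formal parameter 0, then after inlining the indirect call really
   goes through formal 0 of the new root, so ici->param_index is rewritten
   from 1 to 0.  */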
2719
2720 static bool
2721 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2722 struct cgraph_node *node,
2723 vec<cgraph_edge_p> *new_edges)
2724 {
2725 struct ipa_edge_args *top;
2726 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2727 struct ipa_node_params *new_root_info;
2728 bool res = false;
2729
2730 ipa_check_create_edge_args ();
2731 top = IPA_EDGE_REF (cs);
2732 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2733 ? cs->caller->global.inlined_to
2734 : cs->caller);
2735
2736 for (ie = node->indirect_calls; ie; ie = next_ie)
2737 {
2738 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2739 struct ipa_jump_func *jfunc;
2740 int param_index;
2741
2742 next_ie = ie->next_callee;
2743
2744 if (ici->param_index == -1)
2745 continue;
2746
2747 /* We must check the range due to calls with a variable number of arguments: */
2748 if (ici->param_index >= ipa_get_cs_argument_count (top))
2749 {
2750 ici->param_index = -1;
2751 continue;
2752 }
2753
2754 param_index = ici->param_index;
2755 jfunc = ipa_get_ith_jump_func (top, param_index);
2756
2757 if (!flag_indirect_inlining)
2758 new_direct_edge = NULL;
2759 else if (ici->polymorphic)
2760 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2761 new_root_info);
2762 else
2763 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2764 new_root_info);
2765 /* If speculation was removed, then we need to do nothing. */
2766 if (new_direct_edge && new_direct_edge != ie)
2767 {
2768 new_direct_edge->indirect_inlining_edge = 1;
2769 top = IPA_EDGE_REF (cs);
2770 res = true;
2771 }
2772 else if (new_direct_edge)
2773 {
2774 new_direct_edge->indirect_inlining_edge = 1;
2775 if (new_direct_edge->call_stmt)
2776 new_direct_edge->call_stmt_cannot_inline_p
2777 = !gimple_check_call_matching_types (
2778 new_direct_edge->call_stmt,
2779 new_direct_edge->callee->decl, false);
2780 if (new_edges)
2781 {
2782 new_edges->safe_push (new_direct_edge);
2783 res = true;
2784 }
2785 top = IPA_EDGE_REF (cs);
2786 }
2787 else if (jfunc->type == IPA_JF_PASS_THROUGH
2788 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2789 {
2790 if (ici->agg_contents
2791 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2792 ici->param_index = -1;
2793 else
2794 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2795 }
2796 else if (jfunc->type == IPA_JF_ANCESTOR)
2797 {
2798 if (ici->agg_contents
2799 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2800 ici->param_index = -1;
2801 else
2802 {
2803 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2804 if (ipa_get_jf_ancestor_offset (jfunc))
2805 ici->outer_type = NULL;
2806 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2807 }
2808 }
2809 else
2810 /* Either we can find a destination for this edge now or never. */
2811 ici->param_index = -1;
2812 }
2813
2814 return res;
2815 }
2816
2817 /* Recursively traverse subtree of NODE (including node) made of inlined
2818 cgraph_edges when CS has been inlined and invoke
2819 update_indirect_edges_after_inlining on all nodes and
2820 update_jump_functions_after_inlining on all non-inlined edges that lead out
2821 of this subtree. Newly discovered indirect edges will be added to
2822 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2823 created. */
2824
2825 static bool
2826 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2827 struct cgraph_node *node,
2828 vec<cgraph_edge_p> *new_edges)
2829 {
2830 struct cgraph_edge *e;
2831 bool res;
2832
2833 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2834
2835 for (e = node->callees; e; e = e->next_callee)
2836 if (!e->inline_failed)
2837 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2838 else
2839 update_jump_functions_after_inlining (cs, e);
2840 for (e = node->indirect_calls; e; e = e->next_callee)
2841 update_jump_functions_after_inlining (cs, e);
2842
2843 return res;
2844 }
2845
2846 /* Combine two controlled uses counts as done during inlining. */
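
/* The inlined call site itself accounts for one of the uses counted in C,
   and after inlining that use is replaced by the D uses made of the value
   in the callee; e.g. with C == 3 and D == 2 the combined count is
   3 + 2 - 1 == 4.  */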
2847
2848 static int
2849 combine_controlled_uses_counters (int c, int d)
2850 {
2851 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2852 return IPA_UNDESCRIBED_USE;
2853 else
2854 return c + d - 1;
2855 }
2856
2857 /* Propagate the number of controlled users from CS->callee to the new root of the
2858 tree of inlined nodes. */
2859
2860 static void
2861 propagate_controlled_uses (struct cgraph_edge *cs)
2862 {
2863 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2864 struct cgraph_node *new_root = cs->caller->global.inlined_to
2865 ? cs->caller->global.inlined_to : cs->caller;
2866 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2867 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2868 int count, i;
2869
2870 count = MIN (ipa_get_cs_argument_count (args),
2871 ipa_get_param_count (old_root_info));
2872 for (i = 0; i < count; i++)
2873 {
2874 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2875 struct ipa_cst_ref_desc *rdesc;
2876
2877 if (jf->type == IPA_JF_PASS_THROUGH)
2878 {
2879 int src_idx, c, d;
2880 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2881 c = ipa_get_controlled_uses (new_root_info, src_idx);
2882 d = ipa_get_controlled_uses (old_root_info, i);
2883
2884 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2885 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2886 c = combine_controlled_uses_counters (c, d);
2887 ipa_set_controlled_uses (new_root_info, src_idx, c);
2888 if (c == 0 && new_root_info->ipcp_orig_node)
2889 {
2890 struct cgraph_node *n;
2891 struct ipa_ref *ref;
2892 tree t = new_root_info->known_vals[src_idx];
2893
2894 if (t && TREE_CODE (t) == ADDR_EXPR
2895 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2896 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2897 && (ref = ipa_find_reference (new_root,
2898 n, NULL, 0)))
2899 {
2900 if (dump_file)
2901 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2902 "reference from %s/%i to %s/%i.\n",
2903 xstrdup (new_root->name ()),
2904 new_root->order,
2905 xstrdup (n->name ()), n->order);
2906 ipa_remove_reference (ref);
2907 }
2908 }
2909 }
2910 else if (jf->type == IPA_JF_CONST
2911 && (rdesc = jfunc_rdesc_usable (jf)))
2912 {
2913 int d = ipa_get_controlled_uses (old_root_info, i);
2914 int c = rdesc->refcount;
2915 rdesc->refcount = combine_controlled_uses_counters (c, d);
2916 if (rdesc->refcount == 0)
2917 {
2918 tree cst = ipa_get_jf_constant (jf);
2919 struct cgraph_node *n;
2920 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2921 && TREE_CODE (TREE_OPERAND (cst, 0))
2922 == FUNCTION_DECL);
2923 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2924 if (n)
2925 {
2926 struct cgraph_node *clone;
2927 bool ok;
2928 ok = remove_described_reference (n, rdesc);
2929 gcc_checking_assert (ok);
2930
2931 clone = cs->caller;
2932 while (clone->global.inlined_to
2933 && clone != rdesc->cs->caller
2934 && IPA_NODE_REF (clone)->ipcp_orig_node)
2935 {
2936 struct ipa_ref *ref;
2937 ref = ipa_find_reference (clone,
2938 n, NULL, 0);
2939 if (ref)
2940 {
2941 if (dump_file)
2942 fprintf (dump_file, "ipa-prop: Removing "
2943 "cloning-created reference "
2944 "from %s/%i to %s/%i.\n",
2945 xstrdup (clone->name ()),
2946 clone->order,
2947 xstrdup (n->name ()),
2948 n->order);
2949 ipa_remove_reference (ref);
2950 }
2951 clone = clone->callers->caller;
2952 }
2953 }
2954 }
2955 }
2956 }
2957
2958 for (i = ipa_get_param_count (old_root_info);
2959 i < ipa_get_cs_argument_count (args);
2960 i++)
2961 {
2962 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2963
2964 if (jf->type == IPA_JF_CONST)
2965 {
2966 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2967 if (rdesc)
2968 rdesc->refcount = IPA_UNDESCRIBED_USE;
2969 }
2970 else if (jf->type == IPA_JF_PASS_THROUGH)
2971 ipa_set_controlled_uses (new_root_info,
2972 jf->value.pass_through.formal_id,
2973 IPA_UNDESCRIBED_USE);
2974 }
2975 }
2976
2977 /* Update jump functions and call note functions on inlining the call site CS.
2978 CS is expected to lead to a node already cloned by
2979 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2980 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2981 created. */
2982
2983 bool
2984 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2985 vec<cgraph_edge_p> *new_edges)
2986 {
2987 bool changed;
2988 /* Do nothing if the preparation phase has not been carried out yet
2989 (i.e. during early inlining). */
2990 if (!ipa_node_params_vector.exists ())
2991 return false;
2992 gcc_assert (ipa_edge_args_vector);
2993
2994 propagate_controlled_uses (cs);
2995 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2996
2997 return changed;
2998 }
2999
3000 /* Frees all dynamically allocated structures that the argument info points
3001 to. */
3002
3003 void
3004 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3005 {
3006 vec_free (args->jump_functions);
3007 memset (args, 0, sizeof (*args));
3008 }
3009
3010 /* Free all ipa_edge structures. */
3011
3012 void
3013 ipa_free_all_edge_args (void)
3014 {
3015 int i;
3016 struct ipa_edge_args *args;
3017
3018 if (!ipa_edge_args_vector)
3019 return;
3020
3021 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3022 ipa_free_edge_args_substructures (args);
3023
3024 vec_free (ipa_edge_args_vector);
3025 }
3026
3027 /* Frees all dynamically allocated structures that the param info points
3028 to. */
3029
3030 void
3031 ipa_free_node_params_substructures (struct ipa_node_params *info)
3032 {
3033 info->descriptors.release ();
3034 free (info->lattices);
3035 /* Lattice values and their sources are deallocated with their allocation
3036 pool. */
3037 info->known_vals.release ();
3038 memset (info, 0, sizeof (*info));
3039 }
3040
3041 /* Free all ipa_node_params structures. */
3042
3043 void
3044 ipa_free_all_node_params (void)
3045 {
3046 int i;
3047 struct ipa_node_params *info;
3048
3049 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3050 ipa_free_node_params_substructures (info);
3051
3052 ipa_node_params_vector.release ();
3053 }
3054
3055 /* Set the aggregate replacements of NODE to be AGGVALS. */
3056
3057 void
3058 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3059 struct ipa_agg_replacement_value *aggvals)
3060 {
3061 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3062 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3063
3064 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3065 }
3066
3067 /* Hook that is called by cgraph.c when an edge is removed. */
3068
3069 static void
3070 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3071 {
3072 struct ipa_edge_args *args;
3073
3074 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3075 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3076 return;
3077
3078 args = IPA_EDGE_REF (cs);
3079 if (args->jump_functions)
3080 {
3081 struct ipa_jump_func *jf;
3082 int i;
3083 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3084 {
3085 struct ipa_cst_ref_desc *rdesc;
3086 try_decrement_rdesc_refcount (jf);
3087 if (jf->type == IPA_JF_CONST
3088 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3089 && rdesc->cs == cs)
3090 rdesc->cs = NULL;
3091 }
3092 }
3093
3094 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3095 }
3096
3097 /* Hook that is called by cgraph.c when a node is removed. */
3098
3099 static void
3100 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3101 {
3102 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3103 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3104 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3105 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3106 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3107 }
3108
3109 /* Hook that is called by cgraph.c when an edge is duplicated. */
3110
3111 static void
3112 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3113 __attribute__((unused)) void *data)
3114 {
3115 struct ipa_edge_args *old_args, *new_args;
3116 unsigned int i;
3117
3118 ipa_check_create_edge_args ();
3119
3120 old_args = IPA_EDGE_REF (src);
3121 new_args = IPA_EDGE_REF (dst);
3122
3123 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3124
3125 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3126 {
3127 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3128 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3129
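/* The copy of the jump function vector above shared the aggregate item
   vectors between SRC_JF and DST_JF; give DST_JF a copy of its own.  */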
3130 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3131
3132 if (src_jf->type == IPA_JF_CONST)
3133 {
3134 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3135
3136 if (!src_rdesc)
3137 dst_jf->value.constant.rdesc = NULL;
3138 else if (src->caller == dst->caller)
3139 {
3140 struct ipa_ref *ref;
3141 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3142 gcc_checking_assert (n);
3143 ref = ipa_find_reference (src->caller, n,
3144 src->call_stmt, src->lto_stmt_uid);
3145 gcc_checking_assert (ref);
3146 ipa_clone_ref (ref, dst->caller, ref->stmt);
3147
3148 gcc_checking_assert (ipa_refdesc_pool);
3149 struct ipa_cst_ref_desc *dst_rdesc
3150 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3151 dst_rdesc->cs = dst;
3152 dst_rdesc->refcount = src_rdesc->refcount;
3153 dst_rdesc->next_duplicate = NULL;
3154 dst_jf->value.constant.rdesc = dst_rdesc;
3155 }
3156 else if (src_rdesc->cs == src)
3157 {
3158 struct ipa_cst_ref_desc *dst_rdesc;
3159 gcc_checking_assert (ipa_refdesc_pool);
3160 dst_rdesc
3161 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3162 dst_rdesc->cs = dst;
3163 dst_rdesc->refcount = src_rdesc->refcount;
3164 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3165 src_rdesc->next_duplicate = dst_rdesc;
3166 dst_jf->value.constant.rdesc = dst_rdesc;
3167 }
3168 else
3169 {
3170 struct ipa_cst_ref_desc *dst_rdesc;
3171 /* This can happen during inlining, when a JFUNC can refer to a
3172 reference taken in a function up in the tree of inline clones.
3173 We need to find the duplicate that refers to our tree of
3174 inline clones. */
3175
3176 gcc_assert (dst->caller->global.inlined_to);
3177 for (dst_rdesc = src_rdesc->next_duplicate;
3178 dst_rdesc;
3179 dst_rdesc = dst_rdesc->next_duplicate)
3180 {
3181 struct cgraph_node *top;
3182 top = dst_rdesc->cs->caller->global.inlined_to
3183 ? dst_rdesc->cs->caller->global.inlined_to
3184 : dst_rdesc->cs->caller;
3185 if (dst->caller->global.inlined_to == top)
3186 break;
3187 }
3188 gcc_assert (dst_rdesc);
3189 dst_jf->value.constant.rdesc = dst_rdesc;
3190 }
3191 }
3192 }
3193 }
3194
3195 /* Hook that is called by cgraph.c when a node is duplicated. */
3196
3197 static void
3198 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3199 ATTRIBUTE_UNUSED void *data)
3200 {
3201 struct ipa_node_params *old_info, *new_info;
3202 struct ipa_agg_replacement_value *old_av, *new_av;
3203
3204 ipa_check_create_node_params ();
3205 old_info = IPA_NODE_REF (src);
3206 new_info = IPA_NODE_REF (dst);
3207
3208 new_info->descriptors = old_info->descriptors.copy ();
3209 new_info->lattices = NULL;
3210 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3211
3212 new_info->uses_analysis_done = old_info->uses_analysis_done;
3213 new_info->node_enqueued = old_info->node_enqueued;
3214
3215 old_av = ipa_get_agg_replacements_for_node (src);
3216 if (!old_av)
3217 return;
3218
3219 new_av = NULL;
3220 while (old_av)
3221 {
3222 struct ipa_agg_replacement_value *v;
3223
3224 v = ggc_alloc_ipa_agg_replacement_value ();
3225 memcpy (v, old_av, sizeof (*v));
3226 v->next = new_av;
3227 new_av = v;
3228 old_av = old_av->next;
3229 }
3230 ipa_set_node_agg_value_chain (dst, new_av);
3231 }
3232
3233
3234 /* Analyze a function that was newly added to the callgraph. */
3235
3236 static void
3237 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3238 {
3239 if (cgraph_function_with_gimple_body_p (node))
3240 ipa_analyze_node (node);
3241 }
3242
3243 /* Register our cgraph hooks if they are not already there. */
3244
3245 void
3246 ipa_register_cgraph_hooks (void)
3247 {
3248 if (!edge_removal_hook_holder)
3249 edge_removal_hook_holder =
3250 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3251 if (!node_removal_hook_holder)
3252 node_removal_hook_holder =
3253 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3254 if (!edge_duplication_hook_holder)
3255 edge_duplication_hook_holder =
3256 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3257 if (!node_duplication_hook_holder)
3258 node_duplication_hook_holder =
3259 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3260 function_insertion_hook_holder =
3261 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3262 }
3263
3264 /* Unregister our cgraph hooks. */
3265
3266 static void
3267 ipa_unregister_cgraph_hooks (void)
3268 {
3269 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3270 edge_removal_hook_holder = NULL;
3271 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3272 node_removal_hook_holder = NULL;
3273 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3274 edge_duplication_hook_holder = NULL;
3275 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3276 node_duplication_hook_holder = NULL;
3277 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3278 function_insertion_hook_holder = NULL;
3279 }
3280
3281 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3282 longer needed after ipa-cp. */
3283
3284 void
3285 ipa_free_all_structures_after_ipa_cp (void)
3286 {
3287 if (!optimize)
3288 {
3289 ipa_free_all_edge_args ();
3290 ipa_free_all_node_params ();
3291 free_alloc_pool (ipcp_sources_pool);
3292 free_alloc_pool (ipcp_values_pool);
3293 free_alloc_pool (ipcp_agg_lattice_pool);
3294 ipa_unregister_cgraph_hooks ();
3295 if (ipa_refdesc_pool)
3296 free_alloc_pool (ipa_refdesc_pool);
3297 }
3298 }
3299
3300 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3301 longer needed after indirect inlining. */
3302
3303 void
3304 ipa_free_all_structures_after_iinln (void)
3305 {
3306 ipa_free_all_edge_args ();
3307 ipa_free_all_node_params ();
3308 ipa_unregister_cgraph_hooks ();
3309 if (ipcp_sources_pool)
3310 free_alloc_pool (ipcp_sources_pool);
3311 if (ipcp_values_pool)
3312 free_alloc_pool (ipcp_values_pool);
3313 if (ipcp_agg_lattice_pool)
3314 free_alloc_pool (ipcp_agg_lattice_pool);
3315 if (ipa_refdesc_pool)
3316 free_alloc_pool (ipa_refdesc_pool);
3317 }
3318
3319 /* Print the parameter descriptors of the function corresponding to NODE
3320 to file F. */
3321
3322 void
3323 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3324 {
3325 int i, count;
3326 struct ipa_node_params *info;
3327
3328 if (!node->definition)
3329 return;
3330 info = IPA_NODE_REF (node);
3331 fprintf (f, " function %s/%i parameter descriptors:\n",
3332 node->name (), node->order);
3333 count = ipa_get_param_count (info);
3334 for (i = 0; i < count; i++)
3335 {
3336 int c;
3337
3338 fprintf (f, " ");
3339 ipa_dump_param (f, info, i);
3340 if (ipa_is_param_used (info, i))
3341 fprintf (f, " used");
3342 c = ipa_get_controlled_uses (info, i);
3343 if (c == IPA_UNDESCRIBED_USE)
3344 fprintf (f, " undescribed_use");
3345 else
3346 fprintf (f, " controlled_uses=%i", c);
3347 fprintf (f, "\n");
3348 }
3349 }
3350
3351 /* Print the parameter descriptors of all functions in the
3352 callgraph to file F. */
3353
3354 void
3355 ipa_print_all_params (FILE * f)
3356 {
3357 struct cgraph_node *node;
3358
3359 fprintf (f, "\nFunction parameters:\n");
3360 FOR_EACH_FUNCTION (node)
3361 ipa_print_node_params (f, node);
3362 }
3363
3364 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3365
3366 vec<tree>
3367 ipa_get_vector_of_formal_parms (tree fndecl)
3368 {
3369 vec<tree> args;
3370 int count;
3371 tree parm;
3372
3373 gcc_assert (!flag_wpa);
3374 count = count_formal_params (fndecl);
3375 args.create (count);
3376 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3377 args.quick_push (parm);
3378
3379 return args;
3380 }
3381
3382 /* Return a heap allocated vector containing types of formal parameters of
3383 function type FNTYPE. */
3384
3385 vec<tree>
3386 ipa_get_vector_of_formal_parm_types (tree fntype)
3387 {
3388 vec<tree> types;
3389 int count = 0;
3390 tree t;
3391
3392 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3393 count++;
3394
3395 types.create (count);
3396 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3397 types.quick_push (TREE_VALUE (t));
3398
3399 return types;
3400 }
3401
3402 /* Modify the function declaration FNDECL and its type according to the plan in
3403 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3404 to reflect the actual parameters being modified which are determined by the
3405 base_index field. */
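
/* A hypothetical illustration: for the original declaration

     int f (int a, struct S *p);

   an ADJUSTMENTS vector along the lines of

     { { op = IPA_PARM_OP_COPY, base_index = 0 },
       { op = IPA_PARM_OP_NEW, base_index = 1, type = int, by_ref = false } }

   keeps A unchanged and replaces P with a synthesized scalar int parameter
   whose name is derived from arg_prefix (or "SYNTH" if none is given).  */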
3406
3407 void
3408 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3409 {
3410 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3411 tree orig_type = TREE_TYPE (fndecl);
3412 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3413
3414 /* The following test is an ugly hack; some functions simply don't have any
3415 arguments in their type. This is probably a bug but well... */
3416 bool care_for_types = (old_arg_types != NULL_TREE);
3417 bool last_parm_void;
3418 vec<tree> otypes;
3419 if (care_for_types)
3420 {
3421 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3422 == void_type_node);
3423 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3424 if (last_parm_void)
3425 gcc_assert (oparms.length () + 1 == otypes.length ());
3426 else
3427 gcc_assert (oparms.length () == otypes.length ());
3428 }
3429 else
3430 {
3431 last_parm_void = false;
3432 otypes.create (0);
3433 }
3434
3435 int len = adjustments.length ();
3436 tree *link = &DECL_ARGUMENTS (fndecl);
3437 tree new_arg_types = NULL;
3438 for (int i = 0; i < len; i++)
3439 {
3440 struct ipa_parm_adjustment *adj;
3441 gcc_assert (link);
3442
3443 adj = &adjustments[i];
3444 tree parm;
3445 if (adj->op == IPA_PARM_OP_NEW)
3446 parm = NULL;
3447 else
3448 parm = oparms[adj->base_index];
3449 adj->base = parm;
3450
3451 if (adj->op == IPA_PARM_OP_COPY)
3452 {
3453 if (care_for_types)
3454 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3455 new_arg_types);
3456 *link = parm;
3457 link = &DECL_CHAIN (parm);
3458 }
3459 else if (adj->op != IPA_PARM_OP_REMOVE)
3460 {
3461 tree new_parm;
3462 tree ptype;
3463
3464 if (adj->by_ref)
3465 ptype = build_pointer_type (adj->type);
3466 else
3467 {
3468 ptype = adj->type;
3469 if (is_gimple_reg_type (ptype))
3470 {
3471 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3472 if (TYPE_ALIGN (ptype) < malign)
3473 ptype = build_aligned_type (ptype, malign);
3474 }
3475 }
3476
3477 if (care_for_types)
3478 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3479
3480 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3481 ptype);
3482 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3483 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3484 DECL_ARTIFICIAL (new_parm) = 1;
3485 DECL_ARG_TYPE (new_parm) = ptype;
3486 DECL_CONTEXT (new_parm) = fndecl;
3487 TREE_USED (new_parm) = 1;
3488 DECL_IGNORED_P (new_parm) = 1;
3489 layout_decl (new_parm, 0);
3490
3491 if (adj->op == IPA_PARM_OP_NEW)
3492 adj->base = NULL;
3493 else
3494 adj->base = parm;
3495 adj->new_decl = new_parm;
3496
3497 *link = new_parm;
3498 link = &DECL_CHAIN (new_parm);
3499 }
3500 }
3501
3502 *link = NULL_TREE;
3503
3504 tree new_reversed = NULL;
3505 if (care_for_types)
3506 {
3507 new_reversed = nreverse (new_arg_types);
3508 if (last_parm_void)
3509 {
3510 if (new_reversed)
3511 TREE_CHAIN (new_arg_types) = void_list_node;
3512 else
3513 new_reversed = void_list_node;
3514 }
3515 }
3516
3517 /* Use copy_node to preserve as much as possible from the original type
3518 (debug info, attribute lists etc.).
3519 The exception is that METHOD_TYPEs must have a THIS argument; when we
3520 are asked to remove it, we need to build a new FUNCTION_TYPE
3521 instead. */
3522 tree new_type = NULL;
3523 if (TREE_CODE (orig_type) != METHOD_TYPE
3524 || (adjustments[0].op == IPA_PARM_OP_COPY
3525 && adjustments[0].base_index == 0))
3526 {
3527 new_type = build_distinct_type_copy (orig_type);
3528 TYPE_ARG_TYPES (new_type) = new_reversed;
3529 }
3530 else
3531 {
3532 new_type
3533 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3534 new_reversed));
3535 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3536 DECL_VINDEX (fndecl) = NULL_TREE;
3537 }
3538
3539 /* When signature changes, we need to clear builtin info. */
3540 if (DECL_BUILT_IN (fndecl))
3541 {
3542 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3543 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3544 }
3545
3546 /* This is a new type, not a copy of an old type. Need to reassociate
3547 variants. We can handle everything except the main variant lazily. */
3548 tree t = TYPE_MAIN_VARIANT (orig_type);
3549 if (orig_type != t)
3550 {
3551 TYPE_MAIN_VARIANT (new_type) = t;
3552 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3553 TYPE_NEXT_VARIANT (t) = new_type;
3554 }
3555 else
3556 {
3557 TYPE_MAIN_VARIANT (new_type) = new_type;
3558 TYPE_NEXT_VARIANT (new_type) = NULL;
3559 }
3560
3561 TREE_TYPE (fndecl) = new_type;
3562 DECL_VIRTUAL_P (fndecl) = 0;
3563 otypes.release ();
3564 oparms.release ();
3565 }
3566
3567 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3568 If this is a directly recursive call, CS must be NULL. Otherwise it must
3569 contain the corresponding call graph edge. */
3570
3571 void
3572 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3573 ipa_parm_adjustment_vec adjustments)
3574 {
3575 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3576 vec<tree> vargs;
3577 vec<tree, va_gc> **debug_args = NULL;
3578 gimple new_stmt;
3579 gimple_stmt_iterator gsi, prev_gsi;
3580 tree callee_decl;
3581 int i, len;
3582
3583 len = adjustments.length ();
3584 vargs.create (len);
3585 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3586 ipa_remove_stmt_references (current_node, stmt);
3587
3588 gsi = gsi_for_stmt (stmt);
3589 prev_gsi = gsi;
3590 gsi_prev (&prev_gsi);
3591 for (i = 0; i < len; i++)
3592 {
3593 struct ipa_parm_adjustment *adj;
3594
3595 adj = &adjustments[i];
3596
3597 if (adj->op == IPA_PARM_OP_COPY)
3598 {
3599 tree arg = gimple_call_arg (stmt, adj->base_index);
3600
3601 vargs.quick_push (arg);
3602 }
3603 else if (adj->op != IPA_PARM_OP_REMOVE)
3604 {
3605 tree expr, base, off;
3606 location_t loc;
3607 unsigned int deref_align = 0;
3608 bool deref_base = false;
3609
3610 /* When we create a new parameter out of the value of the old one, we
3611 can do the following kinds of transformations:
3612
3613 - A scalar passed by reference is converted to a scalar passed by
3614 value (adj->by_ref is false and the type of the original
3615 actual argument is a pointer to a scalar).
3616
3617 - A part of an aggregate is passed instead of the whole aggregate.
3618 The part can be passed either by value or by reference; this is
3619 determined by the value of adj->by_ref. Moreover, the code below
3620 handles both the situation when the original aggregate is passed by
3621 value (its type is not a pointer) and the one when it is passed by
3622 reference (it is a pointer to an aggregate).
3623
3624 When the new argument is passed by reference (adj->by_ref is true),
3625 it must be a part of an aggregate, and therefore we form it by
3626 simply taking the address of a reference inside the original
3627 aggregate. */
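/* As a hedged illustration (hypothetical types and member names): if
 the original call is foo (&s) and the adjustment selects an int member
 at byte offset 4 with by_ref false, the code below passes the
 equivalent of MEM[(int *)&s + 4B], i.e. the member's value. With
 by_ref true it would pass the address of that location instead. */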
3628
3629 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3630 base = gimple_call_arg (stmt, adj->base_index);
3631 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3632 : EXPR_LOCATION (base);
3633
3634 if (TREE_CODE (base) != ADDR_EXPR
3635 && POINTER_TYPE_P (TREE_TYPE (base)))
3636 off = build_int_cst (adj->alias_ptr_type,
3637 adj->offset / BITS_PER_UNIT);
3638 else
3639 {
3640 HOST_WIDE_INT base_offset;
3641 tree prev_base;
3642 bool addrof;
3643
3644 if (TREE_CODE (base) == ADDR_EXPR)
3645 {
3646 base = TREE_OPERAND (base, 0);
3647 addrof = true;
3648 }
3649 else
3650 addrof = false;
3651 prev_base = base;
3652 base = get_addr_base_and_unit_offset (base, &base_offset);
3653 /* Aggregate arguments can have non-invariant addresses. */
3654 if (!base)
3655 {
3656 base = build_fold_addr_expr (prev_base);
3657 off = build_int_cst (adj->alias_ptr_type,
3658 adj->offset / BITS_PER_UNIT);
3659 }
3660 else if (TREE_CODE (base) == MEM_REF)
3661 {
3662 if (!addrof)
3663 {
3664 deref_base = true;
3665 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3666 }
3667 off = build_int_cst (adj->alias_ptr_type,
3668 base_offset
3669 + adj->offset / BITS_PER_UNIT);
3670 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3671 off);
3672 base = TREE_OPERAND (base, 0);
3673 }
3674 else
3675 {
3676 off = build_int_cst (adj->alias_ptr_type,
3677 base_offset
3678 + adj->offset / BITS_PER_UNIT);
3679 base = build_fold_addr_expr (base);
3680 }
3681 }
3682
3683 if (!adj->by_ref)
3684 {
3685 tree type = adj->type;
3686 unsigned int align;
3687 unsigned HOST_WIDE_INT misalign;
3688
3689 if (deref_base)
3690 {
3691 align = deref_align;
3692 misalign = 0;
3693 }
3694 else
3695 {
3696 get_pointer_alignment_1 (base, &align, &misalign);
3697 if (TYPE_ALIGN (type) > align)
3698 align = TYPE_ALIGN (type);
3699 }
3700 misalign += (tree_to_double_int (off)
3701 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3702 * BITS_PER_UNIT);
3703 misalign = misalign & (align - 1);
3704 if (misalign != 0)
3705 align = (misalign & -misalign);
3706 if (align < TYPE_ALIGN (type))
3707 type = build_aligned_type (type, align);
3708 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3709 }
3710 else
3711 {
3712 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3713 expr = build_fold_addr_expr (expr);
3714 }
3715
3716 expr = force_gimple_operand_gsi (&gsi, expr,
3717 adj->by_ref
3718 || is_gimple_reg_type (adj->type),
3719 NULL, true, GSI_SAME_STMT);
3720 vargs.quick_push (expr);
3721 }
3722 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3723 {
3724 unsigned int ix;
3725 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3726 gimple def_temp;
3727
3728 arg = gimple_call_arg (stmt, adj->base_index);
3729 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3730 {
3731 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3732 continue;
3733 arg = fold_convert_loc (gimple_location (stmt),
3734 TREE_TYPE (origin), arg);
3735 }
3736 if (debug_args == NULL)
3737 debug_args = decl_debug_args_insert (callee_decl);
3738 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3739 if (ddecl == origin)
3740 {
3741 ddecl = (**debug_args)[ix + 1];
3742 break;
3743 }
3744 if (ddecl == NULL)
3745 {
3746 ddecl = make_node (DEBUG_EXPR_DECL);
3747 DECL_ARTIFICIAL (ddecl) = 1;
3748 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3749 DECL_MODE (ddecl) = DECL_MODE (origin);
3750
3751 vec_safe_push (*debug_args, origin);
3752 vec_safe_push (*debug_args, ddecl);
3753 }
3754 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3755 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3756 }
3757 }
3758
3759 if (dump_file && (dump_flags & TDF_DETAILS))
3760 {
3761 fprintf (dump_file, "replacing stmt:");
3762 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3763 }
3764
3765 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3766 vargs.release ();
3767 if (gimple_call_lhs (stmt))
3768 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3769
3770 gimple_set_block (new_stmt, gimple_block (stmt));
3771 if (gimple_has_location (stmt))
3772 gimple_set_location (new_stmt, gimple_location (stmt));
3773 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3774 gimple_call_copy_flags (new_stmt, stmt);
3775
3776 if (dump_file && (dump_flags & TDF_DETAILS))
3777 {
3778 fprintf (dump_file, "with stmt:");
3779 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3780 fprintf (dump_file, "\n");
3781 }
3782 gsi_replace (&gsi, new_stmt, true);
3783 if (cs)
3784 cgraph_set_call_stmt (cs, new_stmt);
3785 do
3786 {
3787 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3788 gsi_prev (&gsi);
3789 }
3790 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3791 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3792
3793 update_ssa (TODO_update_ssa);
3794 free_dominance_info (CDI_DOMINATORS);
3795 }
3796
3797 /* If the expression *EXPR should be replaced by a reduction of a parameter,
3798 do so. ADJUSTMENTS is a vector of adjustments. CONVERT
3799 specifies whether the function should care about type incompatibility
3800 between the current and the new expressions. If it is false, the function
3801 will leave incompatibility issues to the caller. Return true iff the
3802 expression was modified. */
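/* For example (a sketch with hypothetical names): when a parameter
 "struct S *p" has been replaced by a new scalar parameter carrying the
 value of p->x, an occurrence of p->x in the body is rewritten to the
 new decl (or to a dereference of it when the replacement is passed by
 reference), with a VIEW_CONVERT_EXPR added if the types differ. */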
3803
3804 bool
3805 ipa_modify_expr (tree *expr, bool convert,
3806 ipa_parm_adjustment_vec adjustments)
3807 {
3808 struct ipa_parm_adjustment *cand
3809 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3810 if (!cand)
3811 return false;
3812
3813 tree src;
3814 if (cand->by_ref)
3815 src = build_simple_mem_ref (cand->new_decl);
3816 else
3817 src = cand->new_decl;
3818
3819 if (dump_file && (dump_flags & TDF_DETAILS))
3820 {
3821 fprintf (dump_file, "About to replace expr ");
3822 print_generic_expr (dump_file, *expr, 0);
3823 fprintf (dump_file, " with ");
3824 print_generic_expr (dump_file, src, 0);
3825 fprintf (dump_file, "\n");
3826 }
3827
3828 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3829 {
3830 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3831 *expr = vce;
3832 }
3833 else
3834 *expr = src;
3835 return true;
3836 }
3837
3838 /* If T is an SSA_NAME, return NULL if it is not a default definition or
3839 its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3840 the base variable is always returned, regardless of whether it is a
3841 default definition. Return T if it is not an SSA_NAME. */
3842
3843 static tree
3844 get_ssa_base_param (tree t, bool ignore_default_def)
3845 {
3846 if (TREE_CODE (t) == SSA_NAME)
3847 {
3848 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3849 return SSA_NAME_VAR (t);
3850 else
3851 return NULL_TREE;
3852 }
3853 return t;
3854 }
3855
3856 /* Given an expression, return the adjustment entry specifying the
3857 transformation to be done on *EXPR. If no suitable adjustment entry
3858 was found, return NULL.
3859
3860 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
3861 default def, otherwise bail out on them.
3862
3863 If CONVERT is non-NULL, this function will set *CONVERT if the
3864 expression needs to be converted because it is a BIT_FIELD_REF,
3865 IMAGPART_EXPR or REALPART_EXPR. ADJUSTMENTS is the adjustments
3866 vector. */
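/* A brief sketch of the matching below (hypothetical input): for an
 expression such as parm.f at bit offset 32, the function strips any
 wrapping BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR, computes the
 base and offset, and returns the adjustment whose base and offset
 match, unless that adjustment is a plain copy or a removal. */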
3866
3867 ipa_parm_adjustment *
3868 ipa_get_adjustment_candidate (tree **expr, bool *convert,
3869 ipa_parm_adjustment_vec adjustments,
3870 bool ignore_default_def)
3871 {
3872 if (TREE_CODE (**expr) == BIT_FIELD_REF
3873 || TREE_CODE (**expr) == IMAGPART_EXPR
3874 || TREE_CODE (**expr) == REALPART_EXPR)
3875 {
3876 *expr = &TREE_OPERAND (**expr, 0);
3877 if (convert)
3878 *convert = true;
3879 }
3880
3881 HOST_WIDE_INT offset, size, max_size;
3882 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
3883 if (!base || size == -1 || max_size == -1)
3884 return NULL;
3885
3886 if (TREE_CODE (base) == MEM_REF)
3887 {
3888 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
3889 base = TREE_OPERAND (base, 0);
3890 }
3891
3892 base = get_ssa_base_param (base, ignore_default_def);
3893 if (!base || TREE_CODE (base) != PARM_DECL)
3894 return NULL;
3895
3896 struct ipa_parm_adjustment *cand = NULL;
3897 unsigned int len = adjustments.length ();
3898 for (unsigned i = 0; i < len; i++)
3899 {
3900 struct ipa_parm_adjustment *adj = &adjustments[i];
3901
3902 if (adj->base == base
3903 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
3904 {
3905 cand = adj;
3906 break;
3907 }
3908 }
3909
3910 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
3911 return NULL;
3912 return cand;
3913 }
3914
3915 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3916
3917 static bool
3918 index_in_adjustments_multiple_times_p (int base_index,
3919 ipa_parm_adjustment_vec adjustments)
3920 {
3921 int i, len = adjustments.length ();
3922 bool one = false;
3923
3924 for (i = 0; i < len; i++)
3925 {
3926 struct ipa_parm_adjustment *adj;
3927 adj = &adjustments[i];
3928
3929 if (adj->base_index == base_index)
3930 {
3931 if (one)
3932 return true;
3933 else
3934 one = true;
3935 }
3936 }
3937 return false;
3938 }
3939
3940
3941 /* Return adjustments that should have the same effect on function parameters
3942 and call arguments as if they were first changed according to adjustments in
3943 INNER and then by adjustments in OUTER. */
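/* A hedged worked example: suppose INNER copies original parameters 0
 and 2 and removes parameter 1, while OUTER then removes its parameter
 0 and copies its parameter 1. The combined vector removes original
 parameters 0 and 1 and copies original parameter 2, since OUTER's
 indices refer to the parameters surviving INNER. */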
3944
3945 ipa_parm_adjustment_vec
3946 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3947 ipa_parm_adjustment_vec outer)
3948 {
3949 int i, outlen = outer.length ();
3950 int inlen = inner.length ();
3951 int removals = 0;
3952 ipa_parm_adjustment_vec adjustments, tmp;
3953
3954 tmp.create (inlen);
3955 for (i = 0; i < inlen; i++)
3956 {
3957 struct ipa_parm_adjustment *n;
3958 n = &inner[i];
3959
3960 if (n->op == IPA_PARM_OP_REMOVE)
3961 removals++;
3962 else
3963 {
3964 /* FIXME: Handling of new arguments is not implemented yet. */
3965 gcc_assert (n->op != IPA_PARM_OP_NEW);
3966 tmp.quick_push (*n);
3967 }
3968 }
3969
3970 adjustments.create (outlen + removals);
3971 for (i = 0; i < outlen; i++)
3972 {
3973 struct ipa_parm_adjustment r;
3974 struct ipa_parm_adjustment *out = &outer[i];
3975 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3976
3977 memset (&r, 0, sizeof (r));
3978 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
3979 if (out->op == IPA_PARM_OP_REMOVE)
3980 {
3981 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3982 {
3983 r.op = IPA_PARM_OP_REMOVE;
3984 adjustments.quick_push (r);
3985 }
3986 continue;
3987 }
3988 else
3989 {
3990 /* FIXME: Handling of new arguments is not implemented yet. */
3991 gcc_assert (out->op != IPA_PARM_OP_NEW);
3992 }
3993
3994 r.base_index = in->base_index;
3995 r.type = out->type;
3996
3997 /* FIXME: Create nonlocal value too. */
3998
3999 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4000 r.op = IPA_PARM_OP_COPY;
4001 else if (in->op == IPA_PARM_OP_COPY)
4002 r.offset = out->offset;
4003 else if (out->op == IPA_PARM_OP_COPY)
4004 r.offset = in->offset;
4005 else
4006 r.offset = in->offset + out->offset;
4007 adjustments.quick_push (r);
4008 }
4009
4010 for (i = 0; i < inlen; i++)
4011 {
4012 struct ipa_parm_adjustment *n = &inner[i];
4013
4014 if (n->op == IPA_PARM_OP_REMOVE)
4015 adjustments.quick_push (*n);
4016 }
4017
4018 tmp.release ();
4019 return adjustments;
4020 }
4021
4022 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4023 human-friendly way, assuming they are meant to be applied to FNDECL. */
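/* A sketch of what the loop below prints for a two-entry vector with
 hypothetical parameters i and p (spacing and tree dumps abridged):

 IPA param adjustments: 0. base_index: 0 - i, copy_param, type: <integer_type int>
 1. base_index: 1 - p, offset 32, by_ref, type: <integer_type int>

 copy_param and remove_param mark IPA_PARM_OP_COPY and IPA_PARM_OP_REMOVE
 entries; entries of other operations print their offset instead. */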
4024
4025 void
4026 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4027 tree fndecl)
4028 {
4029 int i, len = adjustments.length ();
4030 bool first = true;
4031 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4032
4033 fprintf (file, "IPA param adjustments: ");
4034 for (i = 0; i < len; i++)
4035 {
4036 struct ipa_parm_adjustment *adj;
4037 adj = &adjustments[i];
4038
4039 if (!first)
4040 fprintf (file, " ");
4041 else
4042 first = false;
4043
4044 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4045 print_generic_expr (file, parms[adj->base_index], 0);
4046 if (adj->base)
4047 {
4048 fprintf (file, ", base: ");
4049 print_generic_expr (file, adj->base, 0);
4050 }
4051 if (adj->new_decl)
4052 {
4053 fprintf (file, ", new_decl: ");
4054 print_generic_expr (file, adj->new_decl, 0);
4055 }
4056 if (adj->new_ssa_base)
4057 {
4058 fprintf (file, ", new_ssa_base: ");
4059 print_generic_expr (file, adj->new_ssa_base, 0);
4060 }
4061
4062 if (adj->op == IPA_PARM_OP_COPY)
4063 fprintf (file, ", copy_param");
4064 else if (adj->op == IPA_PARM_OP_REMOVE)
4065 fprintf (file, ", remove_param");
4066 else
4067 fprintf (file, ", offset %li", (long) adj->offset);
4068 if (adj->by_ref)
4069 fprintf (file, ", by_ref");
4070 print_node_brief (file, ", type: ", adj->type, 0);
4071 fprintf (file, "\n");
4072 }
4073 parms.release ();
4074 }
4075
4076 /* Dump the linked list of aggregate replacement values AV to file F. */
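/* The output looks like the following sketch with hypothetical values:
 "Aggregate replacements: 0[0]=1, 0[32]=7", i.e. a comma-separated
 list of index[offset]=value entries. */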
4077
4078 void
4079 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4080 {
4081 bool comma = false;
4082 fprintf (f, " Aggregate replacements:");
4083 for (; av; av = av->next)
4084 {
4085 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4086 av->index, av->offset);
4087 print_generic_expr (f, av->value, 0);
4088 comma = true;
4089 }
4090 fprintf (f, "\n");
4091 }
4092
4093 /* Stream out jump function JUMP_FUNC to OB. */
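/* As a hedged summary of the record written below: the jump function
 type, then the type-specific fields (for IPA_JF_PASS_THROUGH the
 operation, the formal id and, for NOP_EXPR, a bitpack with the
 agg_preserved and type_preserved flags), then the number of aggregate
 items, a by_ref bitpack if there are any, and finally each item's
 offset and value tree. */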
4094
4095 static void
4096 ipa_write_jump_function (struct output_block *ob,
4097 struct ipa_jump_func *jump_func)
4098 {
4099 struct ipa_agg_jf_item *item;
4100 struct bitpack_d bp;
4101 int i, count;
4102
4103 streamer_write_uhwi (ob, jump_func->type);
4104 switch (jump_func->type)
4105 {
4106 case IPA_JF_UNKNOWN:
4107 break;
4108 case IPA_JF_KNOWN_TYPE:
4109 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4110 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4111 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4112 break;
4113 case IPA_JF_CONST:
4114 gcc_assert (EXPR_LOCATION (jump_func->value.constant.value)
4115 == UNKNOWN_LOCATION);
4116 stream_write_tree (ob, jump_func->value.constant.value, true);
4117 break;
4118 case IPA_JF_PASS_THROUGH:
4119 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4120 if (jump_func->value.pass_through.operation == NOP_EXPR)
4121 {
4122 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4123 bp = bitpack_create (ob->main_stream);
4124 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4125 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4126 streamer_write_bitpack (&bp);
4127 }
4128 else
4129 {
4130 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4131 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4132 }
4133 break;
4134 case IPA_JF_ANCESTOR:
4135 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4136 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4137 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4138 bp = bitpack_create (ob->main_stream);
4139 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4140 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4141 streamer_write_bitpack (&bp);
4142 break;
4143 }
4144
4145 count = vec_safe_length (jump_func->agg.items);
4146 streamer_write_uhwi (ob, count);
4147 if (count)
4148 {
4149 bp = bitpack_create (ob->main_stream);
4150 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4151 streamer_write_bitpack (&bp);
4152 }
4153
4154 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4155 {
4156 streamer_write_uhwi (ob, item->offset);
4157 stream_write_tree (ob, item->value, true);
4158 }
4159 }
4160
4161 /* Read in jump function JUMP_FUNC for call graph edge CS from IB, using
4162 DATA_IN for tree streaming. */
4162
4163 static void
4164 ipa_read_jump_function (struct lto_input_block *ib,
4165 struct ipa_jump_func *jump_func,
4166 struct cgraph_edge *cs,
4167 struct data_in *data_in)
4168 {
4169 enum jump_func_type jftype;
4170 enum tree_code operation;
4171 int i, count;
4172
4173 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4174 switch (jftype)
4175 {
4176 case IPA_JF_UNKNOWN:
4177 jump_func->type = IPA_JF_UNKNOWN;
4178 break;
4179 case IPA_JF_KNOWN_TYPE:
4180 {
4181 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4182 tree base_type = stream_read_tree (ib, data_in);
4183 tree component_type = stream_read_tree (ib, data_in);
4184
4185 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4186 break;
4187 }
4188 case IPA_JF_CONST:
4189 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4190 break;
4191 case IPA_JF_PASS_THROUGH:
4192 operation = (enum tree_code) streamer_read_uhwi (ib);
4193 if (operation == NOP_EXPR)
4194 {
4195 int formal_id = streamer_read_uhwi (ib);
4196 struct bitpack_d bp = streamer_read_bitpack (ib);
4197 bool agg_preserved = bp_unpack_value (&bp, 1);
4198 bool type_preserved = bp_unpack_value (&bp, 1);
4199 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4200 type_preserved);
4201 }
4202 else
4203 {
4204 tree operand = stream_read_tree (ib, data_in);
4205 int formal_id = streamer_read_uhwi (ib);
4206 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4207 operation);
4208 }
4209 break;
4210 case IPA_JF_ANCESTOR:
4211 {
4212 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4213 tree type = stream_read_tree (ib, data_in);
4214 int formal_id = streamer_read_uhwi (ib);
4215 struct bitpack_d bp = streamer_read_bitpack (ib);
4216 bool agg_preserved = bp_unpack_value (&bp, 1);
4217 bool type_preserved = bp_unpack_value (&bp, 1);
4218
4219 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4220 type_preserved);
4221 break;
4222 }
4223 }
4224
4225 count = streamer_read_uhwi (ib);
4226 vec_alloc (jump_func->agg.items, count);
4227 if (count)
4228 {
4229 struct bitpack_d bp = streamer_read_bitpack (ib);
4230 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4231 }
4232 for (i = 0; i < count; i++)
4233 {
4234 struct ipa_agg_jf_item item;
4235 item.offset = streamer_read_uhwi (ib);
4236 item.value = stream_read_tree (ib, data_in);
4237 jump_func->agg.items->quick_push (item);
4238 }
4239 }
4240
4241 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4242 relevant to indirect inlining to OB. */
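/* A sketch of the format: param_index and offset as HWIs, one bitpack
 carrying the six flags below, and, for polymorphic calls only, the
 OBJ_TYPE_REF token and the otr and outer types. */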
4243
4244 static void
4245 ipa_write_indirect_edge_info (struct output_block *ob,
4246 struct cgraph_edge *cs)
4247 {
4248 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4249 struct bitpack_d bp;
4250
4251 streamer_write_hwi (ob, ii->param_index);
4252 streamer_write_hwi (ob, ii->offset);
4253 bp = bitpack_create (ob->main_stream);
4254 bp_pack_value (&bp, ii->polymorphic, 1);
4255 bp_pack_value (&bp, ii->agg_contents, 1);
4256 bp_pack_value (&bp, ii->member_ptr, 1);
4257 bp_pack_value (&bp, ii->by_ref, 1);
4258 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4259 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4260 streamer_write_bitpack (&bp);
4261
4262 if (ii->polymorphic)
4263 {
4264 streamer_write_hwi (ob, ii->otr_token);
4265 stream_write_tree (ob, ii->otr_type, true);
4266 stream_write_tree (ob, ii->outer_type, true);
4267 }
4268 }
4269
4270 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4271 relevant to indirect inlining from IB. */
4272
4273 static void
4274 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4275 struct data_in *data_in,
4276 struct cgraph_edge *cs)
4277 {
4278 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4279 struct bitpack_d bp;
4280
4281 ii->param_index = (int) streamer_read_hwi (ib);
4282 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4283 bp = streamer_read_bitpack (ib);
4284 ii->polymorphic = bp_unpack_value (&bp, 1);
4285 ii->agg_contents = bp_unpack_value (&bp, 1);
4286 ii->member_ptr = bp_unpack_value (&bp, 1);
4287 ii->by_ref = bp_unpack_value (&bp, 1);
4288 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4289 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4290 if (ii->polymorphic)
4291 {
4292 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4293 ii->otr_type = stream_read_tree (ib, data_in);
4294 ii->outer_type = stream_read_tree (ib, data_in);
4295 }
4296 }
4297
4298 /* Stream out NODE info to OB. */
4299
4300 static void
4301 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4302 {
4303 int node_ref;
4304 lto_symtab_encoder_t encoder;
4305 struct ipa_node_params *info = IPA_NODE_REF (node);
4306 int j;
4307 struct cgraph_edge *e;
4308 struct bitpack_d bp;
4309
4310 encoder = ob->decl_state->symtab_node_encoder;
4311 node_ref = lto_symtab_encoder_encode (encoder, node);
4312 streamer_write_uhwi (ob, node_ref);
4313
4314 streamer_write_uhwi (ob, ipa_get_param_count (info));
4315 for (j = 0; j < ipa_get_param_count (info); j++)
4316 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4317 bp = bitpack_create (ob->main_stream);
4318 gcc_assert (info->uses_analysis_done
4319 || ipa_get_param_count (info) == 0);
4320 gcc_assert (!info->node_enqueued);
4321 gcc_assert (!info->ipcp_orig_node);
4322 for (j = 0; j < ipa_get_param_count (info); j++)
4323 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4324 streamer_write_bitpack (&bp);
4325 for (j = 0; j < ipa_get_param_count (info); j++)
4326 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4327 for (e = node->callees; e; e = e->next_callee)
4328 {
4329 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4330
4331 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4332 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4333 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4334 }
4335 for (e = node->indirect_calls; e; e = e->next_callee)
4336 {
4337 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4338
4339 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4340 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4341 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4342 ipa_write_indirect_edge_info (ob, e);
4343 }
4344 }
4345
4346 /* Stream in NODE info from IB. */
4347
4348 static void
4349 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4350 struct data_in *data_in)
4351 {
4352 struct ipa_node_params *info = IPA_NODE_REF (node);
4353 int k;
4354 struct cgraph_edge *e;
4355 struct bitpack_d bp;
4356
4357 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4358
4359 for (k = 0; k < ipa_get_param_count (info); k++)
4360 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4361
4362 bp = streamer_read_bitpack (ib);
4363 if (ipa_get_param_count (info) != 0)
4364 info->uses_analysis_done = true;
4365 info->node_enqueued = false;
4366 for (k = 0; k < ipa_get_param_count (info); k++)
4367 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4368 for (k = 0; k < ipa_get_param_count (info); k++)
4369 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4370 for (e = node->callees; e; e = e->next_callee)
4371 {
4372 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4373 int count = streamer_read_uhwi (ib);
4374
4375 if (!count)
4376 continue;
4377 vec_safe_grow_cleared (args->jump_functions, count);
4378
4379 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4380 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4381 data_in);
4382 }
4383 for (e = node->indirect_calls; e; e = e->next_callee)
4384 {
4385 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4386 int count = streamer_read_uhwi (ib);
4387
4388 if (count)
4389 {
4390 vec_safe_grow_cleared (args->jump_functions, count);
4391 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4392 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4393 data_in);
4394 }
4395 ipa_read_indirect_edge_info (ib, data_in, e);
4396 }
4397 }
4398
4399 /* Write jump functions for nodes in the current LTO partition. */
4400
4401 void
4402 ipa_prop_write_jump_functions (void)
4403 {
4404 struct cgraph_node *node;
4405 struct output_block *ob;
4406 unsigned int count = 0;
4407 lto_symtab_encoder_iterator lsei;
4408 lto_symtab_encoder_t encoder;
4409
4411 if (!ipa_node_params_vector.exists ())
4412 return;
4413
4414 ob = create_output_block (LTO_section_jump_functions);
4415 encoder = ob->decl_state->symtab_node_encoder;
4416 ob->cgraph_node = NULL;
4417 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4418 lsei_next_function_in_partition (&lsei))
4419 {
4420 node = lsei_cgraph_node (lsei);
4421 if (cgraph_function_with_gimple_body_p (node)
4422 && IPA_NODE_REF (node) != NULL)
4423 count++;
4424 }
4425
4426 streamer_write_uhwi (ob, count);
4427
4428 /* Process all of the functions. */
4429 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4430 lsei_next_function_in_partition (&lsei))
4431 {
4432 node = lsei_cgraph_node (lsei);
4433 if (cgraph_function_with_gimple_body_p (node)
4434 && IPA_NODE_REF (node) != NULL)
4435 ipa_write_node_info (ob, node);
4436 }
4437 streamer_write_char_stream (ob->main_stream, 0);
4438 produce_asm (ob, NULL);
4439 destroy_output_block (ob);
4440 }
4441
4442 /* Read the jump functions section in file FILE_DATA of length LEN with
4443 data DATA. */
4443
4444 static void
4445 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4446 size_t len)
4447 {
4448 const struct lto_function_header *header =
4449 (const struct lto_function_header *) data;
4450 const int cfg_offset = sizeof (struct lto_function_header);
4451 const int main_offset = cfg_offset + header->cfg_size;
4452 const int string_offset = main_offset + header->main_size;
4453 struct data_in *data_in;
4454 struct lto_input_block ib_main;
4455 unsigned int i;
4456 unsigned int count;
4457
4458 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4459 header->main_size);
4460
4461 data_in =
4462 lto_data_in_create (file_data, (const char *) data + string_offset,
4463 header->string_size, vNULL);
4464 count = streamer_read_uhwi (&ib_main);
4465
4466 for (i = 0; i < count; i++)
4467 {
4468 unsigned int index;
4469 struct cgraph_node *node;
4470 lto_symtab_encoder_t encoder;
4471
4472 index = streamer_read_uhwi (&ib_main);
4473 encoder = file_data->symtab_node_encoder;
4474 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4475 gcc_assert (node->definition);
4476 ipa_read_node_info (&ib_main, node, data_in);
4477 }
4478 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4479 len);
4480 lto_data_in_delete (data_in);
4481 }
4482
4483 /* Read ipcp jump functions. */
4484
4485 void
4486 ipa_prop_read_jump_functions (void)
4487 {
4488 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4489 struct lto_file_decl_data *file_data;
4490 unsigned int j = 0;
4491
4492 ipa_check_create_node_params ();
4493 ipa_check_create_edge_args ();
4494 ipa_register_cgraph_hooks ();
4495
4496 while ((file_data = file_data_vec[j++]))
4497 {
4498 size_t len;
4499 const char *data = lto_get_section_data (file_data,
LTO_section_jump_functions,
NULL, &len);
4500
4501 if (data)
4502 ipa_prop_read_section (file_data, data, len);
4503 }
4504 }
4505
4506 /* After merging units, we can get a mismatch in argument counts.
4507 Decl merging may also have rendered parameter lists obsolete.
4508 Also compute called_with_variable_arg info. */
4509
4510 void
4511 ipa_update_after_lto_read (void)
4512 {
4513 ipa_check_create_node_params ();
4514 ipa_check_create_edge_args ();
4515 }
4516
4517 void
4518 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4519 {
4520 int node_ref;
4521 unsigned int count = 0;
4522 lto_symtab_encoder_t encoder;
4523 struct ipa_agg_replacement_value *aggvals, *av;
4524
4525 aggvals = ipa_get_agg_replacements_for_node (node);
4526 encoder = ob->decl_state->symtab_node_encoder;
4527 node_ref = lto_symtab_encoder_encode (encoder, node);
4528 streamer_write_uhwi (ob, node_ref);
4529
4530 for (av = aggvals; av; av = av->next)
4531 count++;
4532 streamer_write_uhwi (ob, count);
4533
4534 for (av = aggvals; av; av = av->next)
4535 {
4536 struct bitpack_d bp;
4537
4538 streamer_write_uhwi (ob, av->offset);
4539 streamer_write_uhwi (ob, av->index);
4540 stream_write_tree (ob, av->value, true);
4541
4542 bp = bitpack_create (ob->main_stream);
4543 bp_pack_value (&bp, av->by_ref, 1);
4544 streamer_write_bitpack (&bp);
4545 }
4546 }
4547
4548 /* Stream in the aggregate value replacement chain for NODE from IB. */
4549
4550 static void
4551 read_agg_replacement_chain (struct lto_input_block *ib,
4552 struct cgraph_node *node,
4553 struct data_in *data_in)
4554 {
4555 struct ipa_agg_replacement_value *aggvals = NULL;
4556 unsigned int count, i;
4557
4558 count = streamer_read_uhwi (ib);
4559 for (i = 0; i < count; i++)
4560 {
4561 struct ipa_agg_replacement_value *av;
4562 struct bitpack_d bp;
4563
4564 av = ggc_alloc_ipa_agg_replacement_value ();
4565 av->offset = streamer_read_uhwi (ib);
4566 av->index = streamer_read_uhwi (ib);
4567 av->value = stream_read_tree (ib, data_in);
4568 bp = streamer_read_bitpack (ib);
4569 av->by_ref = bp_unpack_value (&bp, 1);
4570 av->next = aggvals;
4571 aggvals = av;
4572 }
4573 ipa_set_node_agg_value_chain (node, aggvals);
4574 }
4575
4576 /* Write all aggregate replacements for nodes in the current LTO
4577 partition. */
4577
4578 void
4579 ipa_prop_write_all_agg_replacement (void)
4580 {
4581 struct cgraph_node *node;
4582 struct output_block *ob;
4583 unsigned int count = 0;
4584 lto_symtab_encoder_iterator lsei;
4585 lto_symtab_encoder_t encoder;
4586
4587 if (!ipa_node_agg_replacements)
4588 return;
4589
4590 ob = create_output_block (LTO_section_ipcp_transform);
4591 encoder = ob->decl_state->symtab_node_encoder;
4592 ob->cgraph_node = NULL;
4593 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4594 lsei_next_function_in_partition (&lsei))
4595 {
4596 node = lsei_cgraph_node (lsei);
4597 if (cgraph_function_with_gimple_body_p (node)
4598 && ipa_get_agg_replacements_for_node (node) != NULL)
4599 count++;
4600 }
4601
4602 streamer_write_uhwi (ob, count);
4603
4604 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4605 lsei_next_function_in_partition (&lsei))
4606 {
4607 node = lsei_cgraph_node (lsei);
4608 if (cgraph_function_with_gimple_body_p (node)
4609 && ipa_get_agg_replacements_for_node (node) != NULL)
4610 write_agg_replacement_chain (ob, node);
4611 }
4612 streamer_write_char_stream (ob->main_stream, 0);
4613 produce_asm (ob, NULL);
4614 destroy_output_block (ob);
4615 }
4616
4617 /* Read the aggregate replacements section in file FILE_DATA of length LEN
4618 with data DATA. */
4619
4620 static void
4621 read_replacements_section (struct lto_file_decl_data *file_data,
4622 const char *data,
4623 size_t len)
4624 {
4625 const struct lto_function_header *header =
4626 (const struct lto_function_header *) data;
4627 const int cfg_offset = sizeof (struct lto_function_header);
4628 const int main_offset = cfg_offset + header->cfg_size;
4629 const int string_offset = main_offset + header->main_size;
4630 struct data_in *data_in;
4631 struct lto_input_block ib_main;
4632 unsigned int i;
4633 unsigned int count;
4634
4635 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4636 header->main_size);
4637
4638 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4639 header->string_size, vNULL);
4640 count = streamer_read_uhwi (&ib_main);
4641
4642 for (i = 0; i < count; i++)
4643 {
4644 unsigned int index;
4645 struct cgraph_node *node;
4646 lto_symtab_encoder_t encoder;
4647
4648 index = streamer_read_uhwi (&ib_main);
4649 encoder = file_data->symtab_node_encoder;
4650 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4651 gcc_assert (node->definition);
4652 read_agg_replacement_chain (&ib_main, node, data_in);
4653 }
4654 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4655 len);
4656 lto_data_in_delete (data_in);
4657 }
4658
4659 /* Read IPA-CP aggregate replacements. */
4660
4661 void
4662 ipa_prop_read_all_agg_replacement (void)
4663 {
4664 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4665 struct lto_file_decl_data *file_data;
4666 unsigned int j = 0;
4667
4668 while ((file_data = file_data_vec[j++]))
4669 {
4670 size_t len;
4671 const char *data = lto_get_section_data (file_data,
4672 LTO_section_ipcp_transform,
4673 NULL, &len);
4674 if (data)
4675 read_replacements_section (file_data, data, len);
4676 }
4677 }
4678
4679 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4680 NODE. */
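/* A hedged worked example: if combined_args_to_skip in NODE contains
 only bit 1, replacement values with original indices 0, 2 and 3 are
 remapped below to 0, 1 and 2 respectively, matching the clone's
 shrunken parameter list. */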
4681
4682 static void
4683 adjust_agg_replacement_values (struct cgraph_node *node,
4684 struct ipa_agg_replacement_value *aggval)
4685 {
4686 struct ipa_agg_replacement_value *v;
4687 int i, c = 0, d = 0, *adj;
4688
4689 if (!node->clone.combined_args_to_skip)
4690 return;
4691
4692 for (v = aggval; v; v = v->next)
4693 {
4694 gcc_assert (v->index >= 0);
4695 if (c < v->index)
4696 c = v->index;
4697 }
4698 c++;
4699
4700 adj = XALLOCAVEC (int, c);
4701 for (i = 0; i < c; i++)
4702 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4703 {
4704 adj[i] = -1;
4705 d++;
4706 }
4707 else
4708 adj[i] = i - d;
4709
4710 for (v = aggval; v; v = v->next)
4711 v->index = adj[v->index];
4712 }
4713
4714
4715 /* Function body transformation phase. */
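/* An illustrative sketch (hypothetical GIMPLE): given the known
 aggregate replacement 0[32]=7 for parameter p, a load such as
 x_1 = p->f; (f at byte offset 4)
 is rewritten below into
 x_1 = 7;
 provided the value's size matches the load and the types are
 convertible. */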
4716
4717 unsigned int
4718 ipcp_transform_function (struct cgraph_node *node)
4719 {
4720 vec<ipa_param_descriptor> descriptors = vNULL;
4721 struct param_analysis_info *parms_ainfo;
4722 struct ipa_agg_replacement_value *aggval;
4723 gimple_stmt_iterator gsi;
4724 basic_block bb;
4725 int param_count;
4726 bool cfg_changed = false, something_changed = false;
4727
4728 gcc_checking_assert (cfun);
4729 gcc_checking_assert (current_function_decl);
4730
4731 if (dump_file)
4732 fprintf (dump_file, "Modification phase of node %s/%i\n",
4733 node->name (), node->order);
4734
4735 aggval = ipa_get_agg_replacements_for_node (node);
4736 if (!aggval)
4737 return 0;
4738 param_count = count_formal_params (node->decl);
4739 if (param_count == 0)
4740 return 0;
4741 adjust_agg_replacement_values (node, aggval);
4742 if (dump_file)
4743 ipa_dump_agg_replacement_values (dump_file, aggval);
4744 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4745 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4746 descriptors.safe_grow_cleared (param_count);
4747 ipa_populate_param_decls (node, descriptors);
4748
4749 FOR_EACH_BB_FN (bb, cfun)
4750 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4751 {
4752 struct ipa_agg_replacement_value *v;
4753 gimple stmt = gsi_stmt (gsi);
4754 tree rhs, val, t;
4755 HOST_WIDE_INT offset, size;
4756 int index;
4757 bool by_ref, vce;
4758
4759 if (!gimple_assign_load_p (stmt))
4760 continue;
4761 rhs = gimple_assign_rhs1 (stmt);
4762 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4763 continue;
4764
4765 vce = false;
4766 t = rhs;
4767 while (handled_component_p (t))
4768 {
4769 /* A VIEW_CONVERT_EXPR can do things like convert an array of integers
4770 into one bigger integer and similar things that we do not handle below. */
4771 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4772 {
4773 vce = true;
4774 break;
4775 }
4776 t = TREE_OPERAND (t, 0);
4777 }
4778 if (vce)
4779 continue;
4780
4781 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4782 rhs, &index, &offset, &size, &by_ref))
4783 continue;
4784 for (v = aggval; v; v = v->next)
4785 if (v->index == index
4786 && v->offset == offset)
4787 break;
4788 if (!v
4789 || v->by_ref != by_ref
4790 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4791 continue;
4792
4793 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4794 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4795 {
4796 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4797 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4798 else if (TYPE_SIZE (TREE_TYPE (rhs))
4799 == TYPE_SIZE (TREE_TYPE (v->value)))
4800 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4801 else
4802 {
4803 if (dump_file)
4804 {
4805 fprintf (dump_file, " const ");
4806 print_generic_expr (dump_file, v->value, 0);
4807 fprintf (dump_file, " can't be converted to type of ");
4808 print_generic_expr (dump_file, rhs, 0);
4809 fprintf (dump_file, "\n");
4810 }
4811 continue;
4812 }
4813 }
4814 else
4815 val = v->value;
4816
4817 if (dump_file && (dump_flags & TDF_DETAILS))
4818 {
4819 fprintf (dump_file, "Modifying stmt:\n ");
4820 print_gimple_stmt (dump_file, stmt, 0, 0);
4821 }
4822 gimple_assign_set_rhs_from_tree (&gsi, val);
4823 update_stmt (stmt);
4824
4825 if (dump_file && (dump_flags & TDF_DETAILS))
4826 {
4827 fprintf (dump_file, "into:\n ");
4828 print_gimple_stmt (dump_file, stmt, 0, 0);
4829 fprintf (dump_file, "\n");
4830 }
4831
4832 something_changed = true;
4833 if (maybe_clean_eh_stmt (stmt)
4834 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4835 cfg_changed = true;
4836 }
4837
4838 (*ipa_node_agg_replacements)[node->uid] = NULL;
4839 free_parms_ainfo (parms_ainfo, param_count);
4840 descriptors.release ();
4841
4842 if (!something_changed)
4843 return 0;
4844 else if (cfg_changed)
4845 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4846 else
4847 return TODO_update_ssa_only_virtuals;
4848 }