re PR c++/58477 (ice in cgraph_speculative_call_info)
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60
61 /* Intermediate information about a parameter that is only useful during the
62 run of ipa_analyze_node and is not kept afterwards. */
63
64 struct param_analysis_info
65 {
66 bool parm_modified, ref_modified, pt_modified;
67 bitmap parm_visited_statements, pt_visited_statements;
68 };
69
70 /* Vector where the parameter infos are actually stored. */
71 vec<ipa_node_params> ipa_node_params_vector;
72 /* Vector of known aggregate values in cloned nodes. */
73 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
74 /* Vector where the edge argument infos are actually stored. */
75 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
76
77 /* Holders of ipa cgraph hooks: */
78 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
79 static struct cgraph_node_hook_list *node_removal_hook_holder;
80 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
81 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
82 static struct cgraph_node_hook_list *function_insertion_hook_holder;
83
84 /* Description of a reference to an IPA constant. */
85 struct ipa_cst_ref_desc
86 {
87 /* Edge that corresponds to the statement which took the reference. */
88 struct cgraph_edge *cs;
89 /* Linked list of duplicates created when call graph edges are cloned. */
90 struct ipa_cst_ref_desc *next_duplicate;
91 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
92 is out of control. */
93 int refcount;
94 };
95
96 /* Allocation pool for reference descriptions. */
97
98 static alloc_pool ipa_refdesc_pool;
99
100 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
101 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
102
103 static bool
104 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
105 {
106 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
107 struct cl_optimization *os;
108
109 if (!fs_opts)
110 return false;
111 os = TREE_OPTIMIZATION (fs_opts);
112 return !os->x_optimize || !os->x_flag_ipa_cp;
113 }
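
/* Editorial illustration (a sketch, not part of the original source): the
   analysis is forbidden e.g. for a function compiled with per-function
   options that disable optimization:

     __attribute__ ((optimize (0)))
     int keep_unoptimized (int x) { return x; }

   keep_unoptimized is a hypothetical name; its decl carries a
   DECL_FUNCTION_SPECIFIC_OPTIMIZATION node with x_optimize == 0, so the
   predicate above returns true. */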
114
115 /* Return the index of the formal parameter whose tree is PTREE among the
116 parameter DESCRIPTORS. */
117
118 static int
119 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
120 {
121 int i, count;
122
123 count = descriptors.length ();
124 for (i = 0; i < count; i++)
125 if (descriptors[i].decl == ptree)
126 return i;
127
128 return -1;
129 }
130
131 /* Return the index of the formal parameter whose tree is PTREE in the
132 function which corresponds to INFO. */
133
134 int
135 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
136 {
137 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
138 }
139
140 /* Populate the decl fields in the parameter DESCRIPTORS that correspond to
141 NODE. */
142
143 static void
144 ipa_populate_param_decls (struct cgraph_node *node,
145 vec<ipa_param_descriptor> &descriptors)
146 {
147 tree fndecl;
148 tree fnargs;
149 tree parm;
150 int param_num;
151
152 fndecl = node->decl;
153 gcc_assert (gimple_has_body_p (fndecl));
154 fnargs = DECL_ARGUMENTS (fndecl);
155 param_num = 0;
156 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
157 {
158 descriptors[param_num].decl = parm;
159 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
160 param_num++;
161 }
162 }
163
164 /* Return how many formal parameters FNDECL has. */
165
166 static inline int
167 count_formal_params (tree fndecl)
168 {
169 tree parm;
170 int count = 0;
171 gcc_assert (gimple_has_body_p (fndecl));
172
173 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
174 count++;
175
176 return count;
177 }
178
179 /* Print to FILE a textual description of formal parameter number I of the
180 function corresponding to INFO: its index and, when available, its
181 declaration. */
182
183 void
184 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
185 {
186 fprintf (file, "param #%i", i);
187 if (info->descriptors[i].decl)
188 {
189 fprintf (file, " ");
190 print_generic_expr (file, info->descriptors[i].decl, 0);
191 }
192 }
193
194 /* Initialize the ipa_node_params structure associated with NODE
195 to hold PARAM_COUNT parameters. */
196
197 void
198 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
199 {
200 struct ipa_node_params *info = IPA_NODE_REF (node);
201
202 if (!info->descriptors.exists () && param_count)
203 info->descriptors.safe_grow_cleared (param_count);
204 }
205
206 /* Initialize the ipa_node_params structure associated with NODE by counting
207 the function parameters, creating the descriptors and populating their
208 param_decls. */
209
210 void
211 ipa_initialize_node_params (struct cgraph_node *node)
212 {
213 struct ipa_node_params *info = IPA_NODE_REF (node);
214
215 if (!info->descriptors.exists ())
216 {
217 ipa_alloc_node_params (node, count_formal_params (node->decl));
218 ipa_populate_param_decls (node, info->descriptors);
219 }
220 }
221
222 /* Print the jump functions associated with call graph edge CS to file F. */
223
224 static void
225 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
226 {
227 int i, count;
228
229 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
230 for (i = 0; i < count; i++)
231 {
232 struct ipa_jump_func *jump_func;
233 enum jump_func_type type;
234
235 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
236 type = jump_func->type;
237
238 fprintf (f, " param %d: ", i);
239 if (type == IPA_JF_UNKNOWN)
240 fprintf (f, "UNKNOWN\n");
241 else if (type == IPA_JF_KNOWN_TYPE)
242 {
243 fprintf (f, "KNOWN TYPE: base ");
244 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
245 fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
246 jump_func->value.known_type.offset);
247 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
248 fprintf (f, "\n");
249 }
250 else if (type == IPA_JF_CONST)
251 {
252 tree val = jump_func->value.constant.value;
253 fprintf (f, "CONST: ");
254 print_generic_expr (f, val, 0);
255 if (TREE_CODE (val) == ADDR_EXPR
256 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
257 {
258 fprintf (f, " -> ");
259 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
260 0);
261 }
262 fprintf (f, "\n");
263 }
264 else if (type == IPA_JF_PASS_THROUGH)
265 {
266 fprintf (f, "PASS THROUGH: ");
267 fprintf (f, "%d, op %s",
268 jump_func->value.pass_through.formal_id,
269 get_tree_code_name (jump_func->value.pass_through.operation));
270 if (jump_func->value.pass_through.operation != NOP_EXPR)
271 {
272 fprintf (f, " ");
273 print_generic_expr (f,
274 jump_func->value.pass_through.operand, 0);
275 }
276 if (jump_func->value.pass_through.agg_preserved)
277 fprintf (f, ", agg_preserved");
278 if (jump_func->value.pass_through.type_preserved)
279 fprintf (f, ", type_preserved");
280 fprintf (f, "\n");
281 }
282 else if (type == IPA_JF_ANCESTOR)
283 {
284 fprintf (f, "ANCESTOR: ");
285 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
286 jump_func->value.ancestor.formal_id,
287 jump_func->value.ancestor.offset);
288 print_generic_expr (f, jump_func->value.ancestor.type, 0);
289 if (jump_func->value.ancestor.agg_preserved)
290 fprintf (f, ", agg_preserved");
291 if (jump_func->value.ancestor.type_preserved)
292 fprintf (f, ", type_preserved");
293 fprintf (f, "\n");
294 }
295
296 if (jump_func->agg.items)
297 {
298 struct ipa_agg_jf_item *item;
299 int j;
300
301 fprintf (f, " Aggregate passed by %s:\n",
302 jump_func->agg.by_ref ? "reference" : "value");
303 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
304 {
305 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
306 item->offset);
307 if (TYPE_P (item->value))
308 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
309 tree_to_uhwi (TYPE_SIZE (item->value)));
310 else
311 {
312 fprintf (f, "cst: ");
313 print_generic_expr (f, item->value, 0);
314 }
315 fprintf (f, "\n");
316 }
317 }
318 }
319 }
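
/* Editorial note: given the formats used above, a dumped jump function might
   look roughly like the following sketch (not captured from a real dump):

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 1: CONST: 42
*/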
320
321
322 /* Print the jump functions of all arguments on all call graph edges going from
323 NODE to file F. */
324
325 void
326 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
327 {
328 struct cgraph_edge *cs;
329
330 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
331 node->order);
332 for (cs = node->callees; cs; cs = cs->next_callee)
333 {
334 if (!ipa_edge_args_info_available_for_edge_p (cs))
335 continue;
336
337 fprintf (f, " callsite %s/%i -> %s/%i : \n",
338 xstrdup (node->name ()), node->order,
339 xstrdup (cs->callee->name ()),
340 cs->callee->order);
341 ipa_print_node_jump_functions_for_edge (f, cs);
342 }
343
344 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
345 {
346 struct cgraph_indirect_call_info *ii;
347 if (!ipa_edge_args_info_available_for_edge_p (cs))
348 continue;
349
350 ii = cs->indirect_info;
351 if (ii->agg_contents)
352 fprintf (f, " indirect %s callsite, calling param %i, "
353 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
354 ii->member_ptr ? "member ptr" : "aggregate",
355 ii->param_index, ii->offset,
356 ii->by_ref ? "by reference" : "by value");
357 else
358 fprintf (f, " indirect %s callsite, calling param %i",
359 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
360
361 if (cs->call_stmt)
362 {
363 fprintf (f, ", for stmt ");
364 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
365 }
366 else
367 fprintf (f, "\n");
368 ipa_print_node_jump_functions_for_edge (f, cs);
369 }
370 }
371
372 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
373
374 void
375 ipa_print_all_jump_functions (FILE *f)
376 {
377 struct cgraph_node *node;
378
379 fprintf (f, "\nJump functions:\n");
380 FOR_EACH_FUNCTION (node)
381 {
382 ipa_print_node_jump_functions (f, node);
383 }
384 }
385
386 /* Set JFUNC to be a known type jump function. */
387
388 static void
389 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
390 tree base_type, tree component_type)
391 {
392 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
393 && TYPE_BINFO (component_type));
394 jfunc->type = IPA_JF_KNOWN_TYPE;
395 jfunc->value.known_type.offset = offset;
396 jfunc->value.known_type.base_type = base_type;
397 jfunc->value.known_type.component_type = component_type;
398 gcc_assert (component_type);
399 }
400
401 /* Set JFUNC to be a copy of another jump function (to be used by the jump
402 function combination code). The two functions will share their rdesc. */
403
404 static void
405 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
406 struct ipa_jump_func *src)
407
408 {
409 gcc_checking_assert (src->type == IPA_JF_CONST);
410 dst->type = IPA_JF_CONST;
411 dst->value.constant = src->value.constant;
412 }
413
414 /* Set JFUNC to be a constant jump function. */
415
416 static void
417 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
418 struct cgraph_edge *cs)
419 {
420 constant = unshare_expr (constant);
421 if (constant && EXPR_P (constant))
422 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
423 jfunc->type = IPA_JF_CONST;
424 jfunc->value.constant.value = unshare_expr_without_location (constant);
425
426 if (TREE_CODE (constant) == ADDR_EXPR
427 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
428 {
429 struct ipa_cst_ref_desc *rdesc;
430 if (!ipa_refdesc_pool)
431 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
432 sizeof (struct ipa_cst_ref_desc), 32);
433
434 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
435 rdesc->cs = cs;
436 rdesc->next_duplicate = NULL;
437 rdesc->refcount = 1;
438 jfunc->value.constant.rdesc = rdesc;
439 }
440 else
441 jfunc->value.constant.rdesc = NULL;
442 }
443
444 /* Set JFUNC to be a simple pass-through jump function. */
445 static void
446 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
447 bool agg_preserved, bool type_preserved)
448 {
449 jfunc->type = IPA_JF_PASS_THROUGH;
450 jfunc->value.pass_through.operand = NULL_TREE;
451 jfunc->value.pass_through.formal_id = formal_id;
452 jfunc->value.pass_through.operation = NOP_EXPR;
453 jfunc->value.pass_through.agg_preserved = agg_preserved;
454 jfunc->value.pass_through.type_preserved = type_preserved;
455 }
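
/* Editorial illustration (assumed example, not from the original source):
   in a caller such as

     void caller (int a)
     {
       callee (a);
     }

   the argument of the call to callee is the unmodified formal parameter
   number 0 of caller, so it can be described by a simple pass-through jump
   function with formal_id 0 and operation NOP_EXPR; caller and callee are
   hypothetical names. */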
456
457 /* Set JFUNC to be an arithmetic pass through jump function. */
458
459 static void
460 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
461 tree operand, enum tree_code operation)
462 {
463 jfunc->type = IPA_JF_PASS_THROUGH;
464 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
465 jfunc->value.pass_through.formal_id = formal_id;
466 jfunc->value.pass_through.operation = operation;
467 jfunc->value.pass_through.agg_preserved = false;
468 jfunc->value.pass_through.type_preserved = false;
469 }
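
/* Editorial illustration (assumed example): for a call such as

     bar (a + 4);

   where a is formal parameter 0 of the caller, the argument can be described
   by an arithmetic pass-through jump function with formal_id 0, operation
   PLUS_EXPR and operand 4; compare case 2 in the comment above
   compute_complex_assign_jump_func below. */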
470
471 /* Set JFUNC to be an ancestor jump function. */
472
473 static void
474 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
475 tree type, int formal_id, bool agg_preserved,
476 bool type_preserved)
477 {
478 jfunc->type = IPA_JF_ANCESTOR;
479 jfunc->value.ancestor.formal_id = formal_id;
480 jfunc->value.ancestor.offset = offset;
481 jfunc->value.ancestor.type = type;
482 jfunc->value.ancestor.agg_preserved = agg_preserved;
483 jfunc->value.ancestor.type_preserved = type_preserved;
484 }
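
/* Editorial illustration (assumed example): when a method passes the address
   of a base sub-object of its this parameter, as in the gimple sketch

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   the argument is described by an ancestor jump function whose formal_id
   refers to this and whose offset is the position of the sub-object within
   the outer class; see also case 3 in the comment above
   compute_complex_assign_jump_func below. */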
485
486 /* Extract the actual BINFO being described by JFUNC, which must be a known
487 type jump function. */
488
489 tree
490 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
491 {
492 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
493 if (!base_binfo)
494 return NULL_TREE;
495 return get_binfo_at_offset (base_binfo,
496 jfunc->value.known_type.offset,
497 jfunc->value.known_type.component_type);
498 }
499
500 /* Structure to be passed in between detect_type_change and
501 check_stmt_for_type_change. */
502
503 struct type_change_info
504 {
505 /* Offset into the object where there is the virtual method pointer we are
506 looking for. */
507 HOST_WIDE_INT offset;
508 /* The declaration or SSA_NAME pointer of the base that we are checking for
509 type change. */
510 tree object;
511 /* If we actually can tell the type that the object has changed to, it is
512 stored in this field. Otherwise it remains NULL_TREE. */
513 tree known_current_type;
514 /* Set to true if dynamic type change has been detected. */
515 bool type_maybe_changed;
516 /* Set to true if multiple types have been encountered. known_current_type
517 must be disregarded in that case. */
518 bool multiple_types_encountered;
519 };
520
521 /* Return true if STMT can modify a virtual method table pointer.
522
523 This function makes special assumptions about both constructors and
524 destructors which are all the functions that are allowed to alter the VMT
525 pointers. It assumes that destructors begin with assignment into all VMT
526 pointers and that constructors essentially look in the following way:
527
528 1) The very first thing they do is to call the constructors of the ancestor
529 sub-objects that have them.
530
531 2) Then the VMT pointers of this object and of all its ancestors are set to
532 new values corresponding to the type the constructor belongs to.
533
534 3) Only afterwards is other code run, such as the constructors of member
535 sub-objects and the code written by the user. Only this code may include
536 calling virtual functions, directly or indirectly.
537
538 There is no way to call a constructor of an ancestor sub-object in any
539 other way.
540
541 This means that we do not have to care whether constructors get the correct
542 type information because they will always change it (in fact, if we define
543 the type to be given by the VMT pointer, it is undefined).
544
545 The most important fact to derive from the above is that if, for some
546 statement in section 3, we try to detect whether the dynamic type has
547 changed, we can safely ignore all calls as we examine the function body
548 backwards until we reach statements in section 2 because these calls cannot
549 be ancestor constructors or destructors (if the input is not bogus) and so
550 do not change the dynamic type (this holds true only for automatically
551 allocated objects but at the moment we devirtualize only these). We then
552 must detect that statements in section 2 change the dynamic type and can try
553 to derive the new type. That is enough and we can stop; we will never see
554 the calls into constructors of sub-objects in this code. Therefore we can
555 safely ignore all call statements that we traverse.
556 */
557
558 static bool
559 stmt_may_be_vtbl_ptr_store (gimple stmt)
560 {
561 if (is_gimple_call (stmt))
562 return false;
563 else if (gimple_clobber_p (stmt))
564 return false;
565 else if (is_gimple_assign (stmt))
566 {
567 tree lhs = gimple_assign_lhs (stmt);
568
569 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
570 {
571 if (flag_strict_aliasing
572 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
573 return false;
574
575 if (TREE_CODE (lhs) == COMPONENT_REF
576 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
577 return false;
578 /* In the future we might want to use get_base_ref_and_offset to find
579 if there is a field corresponding to the offset and if so, proceed
580 almost like if it was a component ref. */
581 }
582 }
583 return true;
584 }
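
/* Editorial illustration of the constructor sections described above (a
   hypothetical C++ sketch, not from the original source):

     Derived::Derived () : Base ()   // section 1: ancestor constructors
     {
       // section 2: the compiler stores the new VMT pointers of this
       // object and all its ancestors here
       do_user_stuff ();             // section 3: user code, which may
                                     // call virtual functions
     }

   Derived, Base and do_user_stuff are made-up names. */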
585
586 /* If STMT can be proved to be an assignment to the virtual method table
587 pointer of the object described by TCI and the type associated with the new
588 table can be identified, return that type. Otherwise return NULL_TREE. */
589
590 static tree
591 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
592 {
593 HOST_WIDE_INT offset, size, max_size;
594 tree lhs, rhs, base;
595
596 if (!gimple_assign_single_p (stmt))
597 return NULL_TREE;
598
599 lhs = gimple_assign_lhs (stmt);
600 rhs = gimple_assign_rhs1 (stmt);
601 if (TREE_CODE (lhs) != COMPONENT_REF
602 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
603 || TREE_CODE (rhs) != ADDR_EXPR)
604 return NULL_TREE;
605 rhs = get_base_address (TREE_OPERAND (rhs, 0));
606 if (!rhs
607 || TREE_CODE (rhs) != VAR_DECL
608 || !DECL_VIRTUAL_P (rhs))
609 return NULL_TREE;
610
611 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
612 if (offset != tci->offset
613 || size != POINTER_SIZE
614 || max_size != POINTER_SIZE)
615 return NULL_TREE;
616 if (TREE_CODE (base) == MEM_REF)
617 {
618 if (TREE_CODE (tci->object) != MEM_REF
619 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
620 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
621 TREE_OPERAND (base, 1)))
622 return NULL_TREE;
623 }
624 else if (tci->object != base)
625 return NULL_TREE;
626
627 return DECL_CONTEXT (rhs);
628 }
629
630 /* Callback of walk_aliased_vdefs and a helper function for
631 detect_type_change to check whether a particular statement may modify
632 the virtual table pointer, and if possible also determine the new type of
633 the (sub-)object. It stores its result into DATA, which points to a
634 type_change_info structure. */
635
636 static bool
637 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
638 {
639 gimple stmt = SSA_NAME_DEF_STMT (vdef);
640 struct type_change_info *tci = (struct type_change_info *) data;
641
642 if (stmt_may_be_vtbl_ptr_store (stmt))
643 {
644 tree type;
645 type = extr_type_from_vtbl_ptr_store (stmt, tci);
646 if (tci->type_maybe_changed
647 && type != tci->known_current_type)
648 tci->multiple_types_encountered = true;
649 tci->known_current_type = type;
650 tci->type_maybe_changed = true;
651 return true;
652 }
653 else
654 return false;
655 }
656
657
658
659 /* Detect whether the dynamic type of ARG, of type COMP_TYPE, has changed
660 (before callsite CALL) by looking for assignments to its virtual table
661 pointer. If it has, return true and fill in the jump function JFUNC with
662 relevant type information or set it to unknown. ARG is the object itself
663 (not a pointer to it, unless dereferenced). BASE is the base of the memory
664 access as returned by get_ref_base_and_extent, as is the offset. */
665
666 static bool
667 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
668 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
669 {
670 struct type_change_info tci;
671 ao_ref ao;
672
673 gcc_checking_assert (DECL_P (arg)
674 || TREE_CODE (arg) == MEM_REF
675 || handled_component_p (arg));
676 /* Const calls cannot call virtual methods through VMT and so type changes do
677 not matter. */
678 if (!flag_devirtualize || !gimple_vuse (call)
679 /* Be sure expected_type is polymorphic. */
680 || !comp_type
681 || TREE_CODE (comp_type) != RECORD_TYPE
682 || !TYPE_BINFO (comp_type)
683 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
684 return false;
685
686 ao_ref_init (&ao, arg);
687 ao.base = base;
688 ao.offset = offset;
689 ao.size = POINTER_SIZE;
690 ao.max_size = ao.size;
691
692 tci.offset = offset;
693 tci.object = get_base_address (arg);
694 tci.known_current_type = NULL_TREE;
695 tci.type_maybe_changed = false;
696 tci.multiple_types_encountered = false;
697
698 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
699 &tci, NULL);
700 if (!tci.type_maybe_changed)
701 return false;
702
703 if (!tci.known_current_type
704 || tci.multiple_types_encountered
705 || offset != 0)
706 jfunc->type = IPA_JF_UNKNOWN;
707 else
708 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
709
710 return true;
711 }
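
/* Editorial note: a typical situation in which a dynamic type does change is
   placement new reusing the storage of an existing object, e.g. the
   hypothetical C++ fragment

     new (&obj) Derived ();

   which stores a new VMT pointer into obj; assignments like that are what
   the vdef walk above is designed to catch. */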
712
713 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
714 SSA name (its dereference will become the base and the offset is assumed to
715 be zero). */
716
717 static bool
718 detect_type_change_ssa (tree arg, tree comp_type,
719 gimple call, struct ipa_jump_func *jfunc)
720 {
721 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
722 if (!flag_devirtualize
723 || !POINTER_TYPE_P (TREE_TYPE (arg)))
724 return false;
725
726 arg = build2 (MEM_REF, ptr_type_node, arg,
727 build_int_cst (ptr_type_node, 0));
728
729 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
730 }
731
732 /* Callback of walk_aliased_vdefs. Sets the boolean variable pointed to by
733 DATA to true to flag that it has been invoked. */
734
735 static bool
736 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
737 void *data)
738 {
739 bool *b = (bool *) data;
740 *b = true;
741 return true;
742 }
743
744 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
745 a value known not to be modified in this function before reaching the
746 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
747 information about the parameter. */
748
749 static bool
750 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
751 gimple stmt, tree parm_load)
752 {
753 bool modified = false;
754 bitmap *visited_stmts;
755 ao_ref refd;
756
757 if (parm_ainfo && parm_ainfo->parm_modified)
758 return false;
759
760 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
761 ao_ref_init (&refd, parm_load);
762 /* We can cache visited statements only when parm_ainfo is available and when
763 we are looking at a naked load of the whole parameter. */
764 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
765 visited_stmts = NULL;
766 else
767 visited_stmts = &parm_ainfo->parm_visited_statements;
768 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
769 visited_stmts);
770 if (parm_ainfo && modified)
771 parm_ainfo->parm_modified = true;
772 return !modified;
773 }
774
775 /* If STMT is an assignment that loads a value from a parameter declaration
776 that has not been modified, return the index of the parameter in
777 ipa_node_params. Otherwise return -1. */
778
779 static int
780 load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
781 struct param_analysis_info *parms_ainfo,
782 gimple stmt)
783 {
784 int index;
785 tree op1;
786
787 if (!gimple_assign_single_p (stmt))
788 return -1;
789
790 op1 = gimple_assign_rhs1 (stmt);
791 if (TREE_CODE (op1) != PARM_DECL)
792 return -1;
793
794 index = ipa_get_param_decl_index_1 (descriptors, op1);
795 if (index < 0
796 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
797 : NULL, stmt, op1))
798 return -1;
799
800 return index;
801 }
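
/* Editorial illustration (assumed gimple sketch): for an addressable
   parameter a, the statement

     a.0_2 = a;

   is a load from an unmodified parameter declaration, so the function above
   would return the index of a; compare case 1 in the comment above
   compute_complex_assign_jump_func below. */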
802
803 /* Return true if memory reference REF loads data that are known to be
804 unmodified in this function before reaching statement STMT. PARM_AINFO, if
805 non-NULL, is a pointer to a structure containing temporary information about
806 PARM. */
807
808 static bool
809 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
810 gimple stmt, tree ref)
811 {
812 bool modified = false;
813 ao_ref refd;
814
815 gcc_checking_assert (gimple_vuse (stmt));
816 if (parm_ainfo && parm_ainfo->ref_modified)
817 return false;
818
819 ao_ref_init (&refd, ref);
820 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
821 NULL);
822 if (parm_ainfo && modified)
823 parm_ainfo->ref_modified = true;
824 return !modified;
825 }
826
827 /* Return true if the data pointed to by PARM is known to be unmodified in this
828 function before reaching call statement CALL into which it is passed.
829 PARM_AINFO is a pointer to a structure containing temporary information
830 about PARM. */
831
832 static bool
833 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
834 gimple call, tree parm)
835 {
836 bool modified = false;
837 ao_ref refd;
838
839 /* It's unnecessary to calculate anything about memory contents for a const
840 function because it is not going to use it. But do not cache the result
841 either. Also, no such calculations for non-pointers. */
842 if (!gimple_vuse (call)
843 || !POINTER_TYPE_P (TREE_TYPE (parm)))
844 return false;
845
846 if (parm_ainfo->pt_modified)
847 return false;
848
849 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
850 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
851 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
852 if (modified)
853 parm_ainfo->pt_modified = true;
854 return !modified;
855 }
856
857 /* Return true if we can prove that OP is a memory reference loading unmodified
858 data from an aggregate passed as a parameter and if the aggregate is passed
859 by reference, that the alias type of the load corresponds to the type of the
860 formal parameter (so that we can rely on this type for TBAA in callers).
861 INFO and PARMS_AINFO describe parameters of the current function (but the
862 latter can be NULL), STMT is the load statement. If the function returns
863 true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
864 the offset within the aggregate and whether it is a load from a value passed
865 by reference, respectively. */
866
867 static bool
868 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
869 struct param_analysis_info *parms_ainfo, gimple stmt,
870 tree op, int *index_p, HOST_WIDE_INT *offset_p,
871 HOST_WIDE_INT *size_p, bool *by_ref_p)
872 {
873 int index;
874 HOST_WIDE_INT size, max_size;
875 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
876
877 if (max_size == -1 || max_size != size || *offset_p < 0)
878 return false;
879
880 if (DECL_P (base))
881 {
882 int index = ipa_get_param_decl_index_1 (descriptors, base);
883 if (index >= 0
884 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
885 : NULL, stmt, op))
886 {
887 *index_p = index;
888 *by_ref_p = false;
889 if (size_p)
890 *size_p = size;
891 return true;
892 }
893 return false;
894 }
895
896 if (TREE_CODE (base) != MEM_REF
897 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
898 || !integer_zerop (TREE_OPERAND (base, 1)))
899 return false;
900
901 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
902 {
903 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
904 index = ipa_get_param_decl_index_1 (descriptors, parm);
905 }
906 else
907 {
908 /* This branch catches situations where a pointer parameter is not a
909 gimple register, for example:
910
911 void hip7(S*) (struct S * p)
912 {
913 void (*<T2e4>) (struct S *) D.1867;
914 struct S * p.1;
915
916 <bb 2>:
917 p.1_1 = p;
918 D.1867_2 = p.1_1->f;
919 D.1867_2 ();
920 gdp = &p;
921 */
922
923 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
924 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
925 }
926
927 if (index >= 0
928 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
929 stmt, op))
930 {
931 *index_p = index;
932 *by_ref_p = true;
933 if (size_p)
934 *size_p = size;
935 return true;
936 }
937 return false;
938 }
939
940 /* Like the previous function, but without the param_analysis_info pointer,
941 for users outside of this file. */
942
943 bool
944 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
945 tree op, int *index_p, HOST_WIDE_INT *offset_p,
946 bool *by_ref_p)
947 {
948 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
949 offset_p, NULL, by_ref_p);
950 }
951
952 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
953 of an assignment statement STMT, try to determine whether we are actually
954 handling any of the following cases and construct an appropriate jump
955 function into JFUNC if so:
956
957 1) The passed value is loaded from a formal parameter which is not a gimple
958 register (most probably because it is addressable, the value has to be
959 scalar) and we can guarantee the value has not changed. This case can
960 therefore be described by a simple pass-through jump function. For example:
961
962 foo (int a)
963 {
964 int a.0;
965
966 a.0_2 = a;
967 bar (a.0_2);
968
969 2) The passed value can be described by a simple arithmetic pass-through
970 jump function. E.g.
971
972 foo (int a)
973 {
974 int D.2064;
975
976 D.2064_4 = a.1(D) + 4;
977 bar (D.2064_4);
978
979 This case can also occur in combination of the previous one, e.g.:
980
981 foo (int a, int z)
982 {
983 int a.0;
984 int D.2064;
985
986 a.0_3 = a;
987 D.2064_4 = a.0_3 + 4;
988 foo (D.2064_4);
989
990 3) The passed value is an address of an object within another one (which
991 is also passed by reference). Such situations are described by an ancestor
992 jump function, for example:
993
994 B::foo() (struct B * const this)
995 {
996 struct A * D.1845;
997
998 D.1845_2 = &this_1(D)->D.1748;
999 A::bar (D.1845_2);
1000
1001 INFO is the structure describing the individual parameters that is used
1002 across different stages of IPA optimizations. PARMS_AINFO contains the
1003 information that is only needed for intraprocedural analysis. */
1004
1005 static void
1006 compute_complex_assign_jump_func (struct ipa_node_params *info,
1007 struct param_analysis_info *parms_ainfo,
1008 struct ipa_jump_func *jfunc,
1009 gimple call, gimple stmt, tree name,
1010 tree param_type)
1011 {
1012 HOST_WIDE_INT offset, size, max_size;
1013 tree op1, tc_ssa, base, ssa;
1014 int index;
1015
1016 op1 = gimple_assign_rhs1 (stmt);
1017
1018 if (TREE_CODE (op1) == SSA_NAME)
1019 {
1020 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1021 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1022 else
1023 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
1024 SSA_NAME_DEF_STMT (op1));
1025 tc_ssa = op1;
1026 }
1027 else
1028 {
1029 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
1030 tc_ssa = gimple_assign_lhs (stmt);
1031 }
1032
1033 if (index >= 0)
1034 {
1035 tree op2 = gimple_assign_rhs2 (stmt);
1036
1037 if (op2)
1038 {
1039 if (!is_gimple_ip_invariant (op2)
1040 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1041 && !useless_type_conversion_p (TREE_TYPE (name),
1042 TREE_TYPE (op1))))
1043 return;
1044
1045 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1046 gimple_assign_rhs_code (stmt));
1047 }
1048 else if (gimple_assign_single_p (stmt))
1049 {
1050 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1051 call, tc_ssa);
1052 bool type_p = false;
1053
1054 if (param_type && POINTER_TYPE_P (param_type))
1055 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1056 call, jfunc);
1057 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1058 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1059 }
1060 return;
1061 }
1062
1063 if (TREE_CODE (op1) != ADDR_EXPR)
1064 return;
1065 op1 = TREE_OPERAND (op1, 0);
1066 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1067 return;
1068 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1069 if (TREE_CODE (base) != MEM_REF
1070 /* If this is a varying address, punt. */
1071 || max_size == -1
1072 || max_size != size)
1073 return;
1074 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
1075 ssa = TREE_OPERAND (base, 0);
1076 if (TREE_CODE (ssa) != SSA_NAME
1077 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1078 || offset < 0)
1079 return;
1080
1081 /* Dynamic types are changed in constructors and destructors. */
1082 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1083 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1084 {
1085 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1086 call, jfunc, offset);
1087 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1088 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1089 parm_ref_data_pass_through_p (&parms_ainfo[index],
1090 call, ssa), type_p);
1091 }
1092 }
1093
1094 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1095 it looks like:
1096
1097 iftmp.1_3 = &obj_2(D)->D.1762;
1098
1099 The base of the MEM_REF must be a default definition SSA NAME of a
1100 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1101 whole MEM_REF expression is returned and the offset calculated from any
1102 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1103 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1104
1105 static tree
1106 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1107 {
1108 HOST_WIDE_INT size, max_size;
1109 tree expr, parm, obj;
1110
1111 if (!gimple_assign_single_p (assign))
1112 return NULL_TREE;
1113 expr = gimple_assign_rhs1 (assign);
1114
1115 if (TREE_CODE (expr) != ADDR_EXPR)
1116 return NULL_TREE;
1117 expr = TREE_OPERAND (expr, 0);
1118 obj = expr;
1119 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1120
1121 if (TREE_CODE (expr) != MEM_REF
1122 /* If this is a varying address, punt. */
1123 || max_size == -1
1124 || max_size != size
1125 || *offset < 0)
1126 return NULL_TREE;
1127 parm = TREE_OPERAND (expr, 0);
1128 if (TREE_CODE (parm) != SSA_NAME
1129 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1130 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1131 return NULL_TREE;
1132
1133 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1134 *obj_p = obj;
1135 return expr;
1136 }
1137
1138
1139 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1140 statement PHI, try to find out whether NAME is in fact a
1141 multiple-inheritance typecast from a descendant into an ancestor of a formal
1142 parameter and thus can be described by an ancestor jump function and if so,
1143 write the appropriate function into JFUNC.
1144
1145 Essentially we want to match the following pattern:
1146
1147 if (obj_2(D) != 0B)
1148 goto <bb 3>;
1149 else
1150 goto <bb 4>;
1151
1152 <bb 3>:
1153 iftmp.1_3 = &obj_2(D)->D.1762;
1154
1155 <bb 4>:
1156 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1157 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1158 return D.1879_6; */
1159
1160 static void
1161 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1162 struct param_analysis_info *parms_ainfo,
1163 struct ipa_jump_func *jfunc,
1164 gimple call, gimple phi, tree param_type)
1165 {
1166 HOST_WIDE_INT offset;
1167 gimple assign, cond;
1168 basic_block phi_bb, assign_bb, cond_bb;
1169 tree tmp, parm, expr, obj;
1170 int index, i;
1171
1172 if (gimple_phi_num_args (phi) != 2)
1173 return;
1174
1175 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1176 tmp = PHI_ARG_DEF (phi, 0);
1177 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1178 tmp = PHI_ARG_DEF (phi, 1);
1179 else
1180 return;
1181 if (TREE_CODE (tmp) != SSA_NAME
1182 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1183 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1184 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1185 return;
1186
1187 assign = SSA_NAME_DEF_STMT (tmp);
1188 assign_bb = gimple_bb (assign);
1189 if (!single_pred_p (assign_bb))
1190 return;
1191 expr = get_ancestor_addr_info (assign, &obj, &offset);
1192 if (!expr)
1193 return;
1194 parm = TREE_OPERAND (expr, 0);
1195 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1196 gcc_assert (index >= 0);
1197
1198 cond_bb = single_pred (assign_bb);
1199 cond = last_stmt (cond_bb);
1200 if (!cond
1201 || gimple_code (cond) != GIMPLE_COND
1202 || gimple_cond_code (cond) != NE_EXPR
1203 || gimple_cond_lhs (cond) != parm
1204 || !integer_zerop (gimple_cond_rhs (cond)))
1205 return;
1206
1207 phi_bb = gimple_bb (phi);
1208 for (i = 0; i < 2; i++)
1209 {
1210 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1211 if (pred != assign_bb && pred != cond_bb)
1212 return;
1213 }
1214
1215 bool type_p = false;
1216 if (param_type && POINTER_TYPE_P (param_type))
1217 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1218 call, jfunc, offset);
1219 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1220 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1221 parm_ref_data_pass_through_p (&parms_ainfo[index],
1222 call, parm), type_p);
1223 }
1224
1225 /* Given OP which is passed as an actual argument to a called function,
1226 determine if it is possible to construct a KNOWN_TYPE jump function for it
1227 and if so, create one and store it to JFUNC.
1228 EXPECTED_TYPE represents a type the argument should be in. */
1229
1230 static void
1231 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1232 gimple call, tree expected_type)
1233 {
1234 HOST_WIDE_INT offset, size, max_size;
1235 tree base;
1236
1237 if (!flag_devirtualize
1238 || TREE_CODE (op) != ADDR_EXPR
1239 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1240 /* Be sure expected_type is polymorphic. */
1241 || !expected_type
1242 || TREE_CODE (expected_type) != RECORD_TYPE
1243 || !TYPE_BINFO (expected_type)
1244 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
1245 return;
1246
1247 op = TREE_OPERAND (op, 0);
1248 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1249 if (!DECL_P (base)
1250 || max_size == -1
1251 || max_size != size
1252 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1253 || is_global_var (base))
1254 return;
1255
1256 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1257 return;
1258
1259 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1260 expected_type);
1261 }
1262
1263 /* Inspect the given TYPE and return true iff it has the same structure (the
1264 same number of fields of the same types) as a C++ member pointer. If
1265 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1266 corresponding fields there. */
1267
1268 static bool
1269 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1270 {
1271 tree fld;
1272
1273 if (TREE_CODE (type) != RECORD_TYPE)
1274 return false;
1275
1276 fld = TYPE_FIELDS (type);
1277 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1278 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1279 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1280 return false;
1281
1282 if (method_ptr)
1283 *method_ptr = fld;
1284
1285 fld = DECL_CHAIN (fld);
1286 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1287 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1288 return false;
1289 if (delta)
1290 *delta = fld;
1291
1292 if (DECL_CHAIN (fld))
1293 return false;
1294
1295 return true;
1296 }
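
/* Editorial note: under the common Itanium C++ ABI a pointer to member
   function is laid out like the following record (a hypothetical sketch),
   which is exactly the shape the function above recognizes:

     struct hypothetical_memptr
     {
       void (*__pfn) (void);  // method pointer or vtable index
       long __delta;          // adjustment applied to the this pointer
     };
*/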
1297
1298 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1299 return the rhs of its defining statement. Otherwise return RHS as it
1300 is. */
1301
1302 static inline tree
1303 get_ssa_def_if_simple_copy (tree rhs)
1304 {
1305 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1306 {
1307 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1308
1309 if (gimple_assign_single_p (def_stmt))
1310 rhs = gimple_assign_rhs1 (def_stmt);
1311 else
1312 break;
1313 }
1314 return rhs;
1315 }
1316
1317 /* Simple linked list, describing known contents of an aggregate before a
1318 call. */
1319
1320 struct ipa_known_agg_contents_list
1321 {
1322 /* Offset and size of the described part of the aggregate. */
1323 HOST_WIDE_INT offset, size;
1324 /* Known constant value or NULL if the contents are known to be unknown. */
1325 tree constant;
1326 /* Pointer to the next structure in the list. */
1327 struct ipa_known_agg_contents_list *next;
1328 };
1329
1330 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1331 in ARG is filled in with constant values. ARG can either be an aggregate
1332 expression or a pointer to an aggregate. JFUNC is the jump function into
1333 which the constants are subsequently stored. */
1334
1335 static void
1336 determine_known_aggregate_parts (gimple call, tree arg,
1337 struct ipa_jump_func *jfunc)
1338 {
1339 struct ipa_known_agg_contents_list *list = NULL;
1340 int item_count = 0, const_count = 0;
1341 HOST_WIDE_INT arg_offset, arg_size;
1342 gimple_stmt_iterator gsi;
1343 tree arg_base;
1344 bool check_ref, by_ref;
1345 ao_ref r;
1346
1347 /* The function operates in three stages. First, we prepare check_ref, r,
1348 arg_base and arg_offset based on what is passed as the actual
1349 argument. */
1350
1351 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1352 {
1353 by_ref = true;
1354 if (TREE_CODE (arg) == SSA_NAME)
1355 {
1356 tree type_size;
1357 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
1358 return;
1359 check_ref = true;
1360 arg_base = arg;
1361 arg_offset = 0;
1362 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1363 arg_size = tree_to_uhwi (type_size);
1364 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1365 }
1366 else if (TREE_CODE (arg) == ADDR_EXPR)
1367 {
1368 HOST_WIDE_INT arg_max_size;
1369
1370 arg = TREE_OPERAND (arg, 0);
1371 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1372 &arg_max_size);
1373 if (arg_max_size == -1
1374 || arg_max_size != arg_size
1375 || arg_offset < 0)
1376 return;
1377 if (DECL_P (arg_base))
1378 {
1379 tree size;
1380 check_ref = false;
1381 size = build_int_cst (integer_type_node, arg_size);
1382 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1383 }
1384 else
1385 return;
1386 }
1387 else
1388 return;
1389 }
1390 else
1391 {
1392 HOST_WIDE_INT arg_max_size;
1393
1394 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1395
1396 by_ref = false;
1397 check_ref = false;
1398 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1399 &arg_max_size);
1400 if (arg_max_size == -1
1401 || arg_max_size != arg_size
1402 || arg_offset < 0)
1403 return;
1404
1405 ao_ref_init (&r, arg);
1406 }
1407
1408 /* Second stage walks back the BB, looks at individual statements and as long
1409 as it is confident of how the statements affect contents of the
1410 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1411 describing it. */
1412 gsi = gsi_for_stmt (call);
1413 gsi_prev (&gsi);
1414 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1415 {
1416 struct ipa_known_agg_contents_list *n, **p;
1417 gimple stmt = gsi_stmt (gsi);
1418 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1419 tree lhs, rhs, lhs_base;
1420 bool partial_overlap;
1421
1422 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1423 continue;
1424 if (!gimple_assign_single_p (stmt))
1425 break;
1426
1427 lhs = gimple_assign_lhs (stmt);
1428 rhs = gimple_assign_rhs1 (stmt);
1429 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1430 || TREE_CODE (lhs) == BIT_FIELD_REF
1431 || contains_bitfld_component_ref_p (lhs))
1432 break;
1433
1434 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1435 &lhs_max_size);
1436 if (lhs_max_size == -1
1437 || lhs_max_size != lhs_size
1438 || (lhs_offset < arg_offset
1439 && lhs_offset + lhs_size > arg_offset)
1440 || (lhs_offset < arg_offset + arg_size
1441 && lhs_offset + lhs_size > arg_offset + arg_size))
1442 break;
1443
1444 if (check_ref)
1445 {
1446 if (TREE_CODE (lhs_base) != MEM_REF
1447 || TREE_OPERAND (lhs_base, 0) != arg_base
1448 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1449 break;
1450 }
1451 else if (lhs_base != arg_base)
1452 {
1453 if (DECL_P (lhs_base))
1454 continue;
1455 else
1456 break;
1457 }
1458
1459 if (lhs_offset + lhs_size < arg_offset
1460 || lhs_offset >= (arg_offset + arg_size))
1461 continue;
1462
1463 partial_overlap = false;
1464 p = &list;
1465 while (*p && (*p)->offset < lhs_offset)
1466 {
1467 if ((*p)->offset + (*p)->size > lhs_offset)
1468 {
1469 partial_overlap = true;
1470 break;
1471 }
1472 p = &(*p)->next;
1473 }
1474 if (partial_overlap)
1475 break;
1476 if (*p && (*p)->offset < lhs_offset + lhs_size)
1477 {
1478 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1479 /* We already know this value is subsequently overwritten with
1480 something else. */
1481 continue;
1482 else
1483 /* Otherwise this is a partial overlap which we cannot
1484 represent. */
1485 break;
1486 }
1487
1488 rhs = get_ssa_def_if_simple_copy (rhs);
1489 n = XALLOCA (struct ipa_known_agg_contents_list);
1490 n->size = lhs_size;
1491 n->offset = lhs_offset;
1492 if (is_gimple_ip_invariant (rhs))
1493 {
1494 n->constant = rhs;
1495 const_count++;
1496 }
1497 else
1498 n->constant = NULL_TREE;
1499 n->next = *p;
1500 *p = n;
1501
1502 item_count++;
1503 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1504 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1505 break;
1506 }
1507
1508 /* Third stage just goes over the list and creates an appropriate vector of
1509 ipa_agg_jf_item structures out of it, of course only if there are
1510 any known constants to begin with. */
1511
1512 if (const_count)
1513 {
1514 jfunc->agg.by_ref = by_ref;
1515 vec_alloc (jfunc->agg.items, const_count);
1516 while (list)
1517 {
1518 if (list->constant)
1519 {
1520 struct ipa_agg_jf_item item;
1521 item.offset = list->offset - arg_offset;
1522 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1523 item.value = unshare_expr_without_location (list->constant);
1524 jfunc->agg.items->quick_push (item);
1525 }
1526 list = list->next;
1527 }
1528 }
1529 }
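
/* Editorial illustration (assumed example): for a call site such as

     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk above finds the two constant stores, so, provided
   nothing clobbers s in between, the jump function of the first argument of
   foo receives two aggregate items (the offsets of fields a and b with
   values 1 and 2) and has agg.by_ref set; s and foo are hypothetical. */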
1530
1531 static tree
1532 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1533 {
1534 int n;
1535 tree type = (e->callee
1536 ? TREE_TYPE (e->callee->decl)
1537 : gimple_call_fntype (e->call_stmt));
1538 tree t = TYPE_ARG_TYPES (type);
1539
1540 for (n = 0; n < i; n++)
1541 {
1542 if (!t)
1543 break;
1544 t = TREE_CHAIN (t);
1545 }
1546 if (t)
1547 return TREE_VALUE (t);
1548 if (!e->callee)
1549 return NULL;
1550 t = DECL_ARGUMENTS (e->callee->decl);
1551 for (n = 0; n < i; n++)
1552 {
1553 if (!t)
1554 return NULL;
1555 t = TREE_CHAIN (t);
1556 }
1557 if (t)
1558 return TREE_TYPE (t);
1559 return NULL;
1560 }
1561
1562 /* Compute jump function for all arguments of callsite CS and insert the
1563 information in the jump_functions array in the ipa_edge_args corresponding
1564 to this callsite. */
1565
1566 static void
1567 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1568 struct cgraph_edge *cs)
1569 {
1570 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1571 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1572 gimple call = cs->call_stmt;
1573 int n, arg_num = gimple_call_num_args (call);
1574
1575 if (arg_num == 0 || args->jump_functions)
1576 return;
1577 vec_safe_grow_cleared (args->jump_functions, arg_num);
1578
1579 if (gimple_call_internal_p (call))
1580 return;
1581 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1582 return;
1583
1584 for (n = 0; n < arg_num; n++)
1585 {
1586 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1587 tree arg = gimple_call_arg (call, n);
1588 tree param_type = ipa_get_callee_param_type (cs, n);
1589
1590 if (is_gimple_ip_invariant (arg))
1591 ipa_set_jf_constant (jfunc, arg, cs);
1592 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1593 && TREE_CODE (arg) == PARM_DECL)
1594 {
1595 int index = ipa_get_param_decl_index (info, arg);
1596
1597 gcc_assert (index >= 0);
1598 /* Aggregate passed by value, check for pass-through, otherwise we
1599 will attempt to fill in aggregate contents later in this
1600 loop. */
1601 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1602 {
1603 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1604 continue;
1605 }
1606 }
1607 else if (TREE_CODE (arg) == SSA_NAME)
1608 {
1609 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1610 {
1611 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1612 if (index >= 0)
1613 {
1614 bool agg_p, type_p;
1615 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1616 call, arg);
1617 if (param_type && POINTER_TYPE_P (param_type))
1618 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1619 call, jfunc);
1620 else
1621 type_p = false;
1622 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1623 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1624 type_p);
1625 }
1626 }
1627 else
1628 {
1629 gimple stmt = SSA_NAME_DEF_STMT (arg);
1630 if (is_gimple_assign (stmt))
1631 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1632 call, stmt, arg, param_type);
1633 else if (gimple_code (stmt) == GIMPLE_PHI)
1634 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1635 call, stmt, param_type);
1636 }
1637 }
1638 else
1639 compute_known_type_jump_func (arg, jfunc, call,
1640 param_type
1641 && POINTER_TYPE_P (param_type)
1642 ? TREE_TYPE (param_type)
1643 : NULL);
1644
1645 if ((jfunc->type != IPA_JF_PASS_THROUGH
1646 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1647 && (jfunc->type != IPA_JF_ANCESTOR
1648 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1649 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1650 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1651 determine_known_aggregate_parts (call, arg, jfunc);
1652 }
1653 }
1654
1655 /* Compute jump functions for all edges - both direct and indirect - outgoing
1656 from NODE. Also count the actual arguments in the process. */
1657
1658 static void
1659 ipa_compute_jump_functions (struct cgraph_node *node,
1660 struct param_analysis_info *parms_ainfo)
1661 {
1662 struct cgraph_edge *cs;
1663
1664 for (cs = node->callees; cs; cs = cs->next_callee)
1665 {
1666 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1667 NULL);
1668 /* We do not need to bother analyzing calls to unknown
1669 functions unless they may become known during lto/whopr. */
1670 if (!callee->definition && !flag_lto)
1671 continue;
1672 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1673 }
1674
1675 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1676 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1677 }
1678
1679 /* If STMT looks like a statement loading a value from a member pointer formal
1680 parameter, return that parameter and store the offset of the field to
1681 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1682 might be clobbered). If USE_DELTA, then we look for a use of the delta
1683 field rather than the pfn. */
1684
1685 static tree
1686 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1687 HOST_WIDE_INT *offset_p)
1688 {
1689 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1690
1691 if (!gimple_assign_single_p (stmt))
1692 return NULL_TREE;
1693
1694 rhs = gimple_assign_rhs1 (stmt);
1695 if (TREE_CODE (rhs) == COMPONENT_REF)
1696 {
1697 ref_field = TREE_OPERAND (rhs, 1);
1698 rhs = TREE_OPERAND (rhs, 0);
1699 }
1700 else
1701 ref_field = NULL_TREE;
1702 if (TREE_CODE (rhs) != MEM_REF)
1703 return NULL_TREE;
1704 rec = TREE_OPERAND (rhs, 0);
1705 if (TREE_CODE (rec) != ADDR_EXPR)
1706 return NULL_TREE;
1707 rec = TREE_OPERAND (rec, 0);
1708 if (TREE_CODE (rec) != PARM_DECL
1709 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1710 return NULL_TREE;
1711 ref_offset = TREE_OPERAND (rhs, 1);
1712
1713 if (use_delta)
1714 fld = delta_field;
1715 else
1716 fld = ptr_field;
1717 if (offset_p)
1718 *offset_p = int_bit_position (fld);
1719
1720 if (ref_field)
1721 {
1722 if (integer_nonzerop (ref_offset))
1723 return NULL_TREE;
1724 return ref_field == fld ? rec : NULL_TREE;
1725 }
1726 else
1727 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1728 : NULL_TREE;
1729 }
1730
1731 /* Returns true iff T is an SSA_NAME defined by a statement. */
1732
1733 static bool
1734 ipa_is_ssa_with_stmt_def (tree t)
1735 {
1736 if (TREE_CODE (t) == SSA_NAME
1737 && !SSA_NAME_IS_DEFAULT_DEF (t))
1738 return true;
1739 else
1740 return false;
1741 }
1742
1743 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1744 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1745 indirect call graph edge. */
1746
1747 static struct cgraph_edge *
1748 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1749 {
1750 struct cgraph_edge *cs;
1751
1752 cs = cgraph_edge (node, stmt);
1753 cs->indirect_info->param_index = param_index;
1754 cs->indirect_info->agg_contents = 0;
1755 cs->indirect_info->member_ptr = 0;
1756 return cs;
1757 }
1758
1759 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1760 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1761 intermediate information about each formal parameter. Currently it checks
1762 whether the call calls a pointer that is a formal parameter and if so, the
1763 parameter is marked with the called flag and an indirect call graph edge
1764 describing the call is created. This is very simple for ordinary pointers
1765 represented in SSA but not-so-nice when it comes to member pointers. The
1766 ugly part of this function does nothing more than trying to match the
1767 pattern of such a call. An example of such a pattern is the gimple dump
1768 below, the call is on the last line:
1769
1770 <bb 2>:
1771 f$__delta_5 = f.__delta;
1772 f$__pfn_24 = f.__pfn;
1773
1774 or
1775 <bb 2>:
1776 f$__delta_5 = MEM[(struct *)&f];
1777 f$__pfn_24 = MEM[(struct *)&f + 4B];
1778
1779 and a few lines below:
1780
1781 <bb 5>
1782 D.2496_3 = (int) f$__pfn_24;
1783 D.2497_4 = D.2496_3 & 1;
1784 if (D.2497_4 != 0)
1785 goto <bb 3>;
1786 else
1787 goto <bb 4>;
1788
1789 <bb 6>:
1790 D.2500_7 = (unsigned int) f$__delta_5;
1791 D.2501_8 = &S + D.2500_7;
1792 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1793 D.2503_10 = *D.2502_9;
1794 D.2504_12 = f$__pfn_24 + -1;
1795 D.2505_13 = (unsigned int) D.2504_12;
1796 D.2506_14 = D.2503_10 + D.2505_13;
1797 D.2507_15 = *D.2506_14;
1798 iftmp.11_16 = (String:: *) D.2507_15;
1799
1800 <bb 7>:
1801 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1802 D.2500_19 = (unsigned int) f$__delta_5;
1803 D.2508_20 = &S + D.2500_19;
1804 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1805
1806 Such patterns are results of simple calls to a member pointer:
1807
1808 int doprinting (int (MyString::* f)(int) const)
1809 {
1810 MyString S ("somestring");
1811
1812 return (S.*f)(4);
1813 }
1814
1815 Moreover, the function looks for called pointers loaded from aggregates
1816 passed by value or reference. */
1817
1818 static void
1819 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1820 struct ipa_node_params *info,
1821 struct param_analysis_info *parms_ainfo,
1822 gimple call, tree target)
1823 {
1824 gimple def;
1825 tree n1, n2;
1826 gimple d1, d2;
1827 tree rec, rec2, cond;
1828 gimple branch;
1829 int index;
1830 basic_block bb, virt_bb, join;
1831 HOST_WIDE_INT offset;
1832 bool by_ref;
1833
1834 if (SSA_NAME_IS_DEFAULT_DEF (target))
1835 {
1836 tree var = SSA_NAME_VAR (target);
1837 index = ipa_get_param_decl_index (info, var);
1838 if (index >= 0)
1839 ipa_note_param_call (node, index, call);
1840 return;
1841 }
1842
1843 def = SSA_NAME_DEF_STMT (target);
1844 if (gimple_assign_single_p (def)
1845 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1846 gimple_assign_rhs1 (def), &index, &offset,
1847 NULL, &by_ref))
1848 {
1849 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1850 if (cs->indirect_info->offset != offset)
1851 cs->indirect_info->outer_type = NULL;
1852 cs->indirect_info->offset = offset;
1853 cs->indirect_info->agg_contents = 1;
1854 cs->indirect_info->by_ref = by_ref;
1855 return;
1856 }
1857
1858 /* Now we need to try to match the complex pattern of calling a member
1859 pointer. */
1860 if (gimple_code (def) != GIMPLE_PHI
1861 || gimple_phi_num_args (def) != 2
1862 || !POINTER_TYPE_P (TREE_TYPE (target))
1863 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1864 return;
1865
1866 /* First, we need to check whether one of these is a load from a member
1867 pointer that is a parameter to this function. */
1868 n1 = PHI_ARG_DEF (def, 0);
1869 n2 = PHI_ARG_DEF (def, 1);
1870 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1871 return;
1872 d1 = SSA_NAME_DEF_STMT (n1);
1873 d2 = SSA_NAME_DEF_STMT (n2);
1874
1875 join = gimple_bb (def);
1876 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1877 {
1878 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1879 return;
1880
1881 bb = EDGE_PRED (join, 0)->src;
1882 virt_bb = gimple_bb (d2);
1883 }
1884 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1885 {
1886 bb = EDGE_PRED (join, 1)->src;
1887 virt_bb = gimple_bb (d1);
1888 }
1889 else
1890 return;
1891
1892 /* Second, we need to check that the basic blocks are laid out in the way
1893 corresponding to the pattern. */
1894
1895 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1896 || single_pred (virt_bb) != bb
1897 || single_succ (virt_bb) != join)
1898 return;
1899
1900 /* Third, let's see that the branching is done depending on the least
1901 significant bit of the pfn. */
1902
1903 branch = last_stmt (bb);
1904 if (!branch || gimple_code (branch) != GIMPLE_COND)
1905 return;
1906
1907 if ((gimple_cond_code (branch) != NE_EXPR
1908 && gimple_cond_code (branch) != EQ_EXPR)
1909 || !integer_zerop (gimple_cond_rhs (branch)))
1910 return;
1911
1912 cond = gimple_cond_lhs (branch);
1913 if (!ipa_is_ssa_with_stmt_def (cond))
1914 return;
1915
1916 def = SSA_NAME_DEF_STMT (cond);
1917 if (!is_gimple_assign (def)
1918 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1919 || !integer_onep (gimple_assign_rhs2 (def)))
1920 return;
1921
1922 cond = gimple_assign_rhs1 (def);
1923 if (!ipa_is_ssa_with_stmt_def (cond))
1924 return;
1925
1926 def = SSA_NAME_DEF_STMT (cond);
1927
1928 if (is_gimple_assign (def)
1929 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1930 {
1931 cond = gimple_assign_rhs1 (def);
1932 if (!ipa_is_ssa_with_stmt_def (cond))
1933 return;
1934 def = SSA_NAME_DEF_STMT (cond);
1935 }
1936
1937 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1938 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1939 == ptrmemfunc_vbit_in_delta),
1940 NULL);
1941 if (rec != rec2)
1942 return;
1943
1944 index = ipa_get_param_decl_index (info, rec);
1945 if (index >= 0
1946 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1947 {
1948 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1949 if (cs->indirect_info->offset != offset)
1950 cs->indirect_info->outer_type = NULL;
1951 cs->indirect_info->offset = offset;
1952 cs->indirect_info->agg_contents = 1;
1953 cs->indirect_info->member_ptr = 1;
1954 }
1955
1956 return;
1957 }
1958
1959 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1960 object referenced in the expression is a formal parameter of the caller
1961 (described by INFO), create a call note for the statement. */
1962
1963 static void
1964 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1965 struct ipa_node_params *info, gimple call,
1966 tree target)
1967 {
1968 struct cgraph_edge *cs;
1969 struct cgraph_indirect_call_info *ii;
1970 struct ipa_jump_func jfunc;
1971 tree obj = OBJ_TYPE_REF_OBJECT (target);
1972 int index;
1973 HOST_WIDE_INT anc_offset;
1974
1975 if (!flag_devirtualize)
1976 return;
1977
1978 if (TREE_CODE (obj) != SSA_NAME)
1979 return;
1980
1981 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1982 {
1983 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1984 return;
1985
1986 anc_offset = 0;
1987 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1988 gcc_assert (index >= 0);
1989 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1990 call, &jfunc))
1991 return;
1992 }
1993 else
1994 {
1995 gimple stmt = SSA_NAME_DEF_STMT (obj);
1996 tree expr;
1997
1998 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1999 if (!expr)
2000 return;
2001 index = ipa_get_param_decl_index (info,
2002 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2003 gcc_assert (index >= 0);
2004 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2005 call, &jfunc, anc_offset))
2006 return;
2007 }
2008
2009 cs = ipa_note_param_call (node, index, call);
2010 ii = cs->indirect_info;
2011 ii->offset = anc_offset;
2012 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2013 ii->otr_type = obj_type_ref_class (target);
2014 ii->polymorphic = 1;
2015 }
2016
2017 /* Analyze a call statement CALL to determine whether and how it utilizes
2018 formal parameters of the caller (described by INFO). PARMS_AINFO is a pointer
2019 to a vector containing intermediate information about each formal parameter. */
2020
2021 static void
2022 ipa_analyze_call_uses (struct cgraph_node *node,
2023 struct ipa_node_params *info,
2024 struct param_analysis_info *parms_ainfo, gimple call)
2025 {
2026 tree target = gimple_call_fn (call);
2027
2028 if (!target)
2029 return;
2030 if (TREE_CODE (target) == SSA_NAME)
2031 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2032 else if (virtual_method_call_p (target))
2033 ipa_analyze_virtual_call_uses (node, info, call, target);
2034 }
2035
2036
2037 /* Analyze the call statement STMT with respect to formal parameters (described
2038 in INFO) of the caller given by NODE. Currently it only checks whether formal
2039 parameters are called. PARMS_AINFO is a pointer to a vector containing
2040 intermediate information about each formal parameter. */
2041
2042 static void
2043 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2044 struct param_analysis_info *parms_ainfo, gimple stmt)
2045 {
2046 if (is_gimple_call (stmt))
2047 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2048 }
2049
2050 /* Callback of walk_stmt_load_store_addr_ops, used to visit loads, stores and
2051 addresses. If OP is a parameter declaration, mark it as used in the info
2052 structure passed in DATA. */
2053
2054 static bool
2055 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2056 tree op, void *data)
2057 {
2058 struct ipa_node_params *info = (struct ipa_node_params *) data;
2059
2060 op = get_base_address (op);
2061 if (op
2062 && TREE_CODE (op) == PARM_DECL)
2063 {
2064 int index = ipa_get_param_decl_index (info, op);
2065 gcc_assert (index >= 0);
2066 ipa_set_param_used (info, index, true);
2067 }
2068
2069 return false;
2070 }
2071
2072 /* Scan the function body of NODE and inspect the uses of formal parameters.
2073 Store the findings in various structures of the associated ipa_node_params
2074 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2075 vector containing intermediate information about each formal parameter. */
2076
2077 static void
2078 ipa_analyze_params_uses (struct cgraph_node *node,
2079 struct param_analysis_info *parms_ainfo)
2080 {
2081 tree decl = node->decl;
2082 basic_block bb;
2083 struct function *func;
2084 gimple_stmt_iterator gsi;
2085 struct ipa_node_params *info = IPA_NODE_REF (node);
2086 int i;
2087
2088 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2089 return;
2090
2091 info->uses_analysis_done = 1;
2092 if (ipa_func_spec_opts_forbid_analysis_p (node))
2093 {
2094 for (i = 0; i < ipa_get_param_count (info); i++)
2095 {
2096 ipa_set_param_used (info, i, true);
2097 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2098 }
2099 return;
2100 }
2101
2102 for (i = 0; i < ipa_get_param_count (info); i++)
2103 {
2104 tree parm = ipa_get_param (info, i);
2105 int controlled_uses = 0;
2106
2107 /* For SSA regs see if parameter is used. For non-SSA we compute
2108 the flag during modification analysis. */
2109 if (is_gimple_reg (parm))
2110 {
2111 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2112 parm);
2113 if (ddef && !has_zero_uses (ddef))
2114 {
2115 imm_use_iterator imm_iter;
2116 use_operand_p use_p;
2117
2118 ipa_set_param_used (info, i, true);
2119 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2120 if (!is_gimple_call (USE_STMT (use_p)))
2121 {
2122 controlled_uses = IPA_UNDESCRIBED_USE;
2123 break;
2124 }
2125 else
2126 controlled_uses++;
2127 }
2128 else
2129 controlled_uses = 0;
2130 }
2131 else
2132 controlled_uses = IPA_UNDESCRIBED_USE;
2133 ipa_set_controlled_uses (info, i, controlled_uses);
2134 }
2135
2136 func = DECL_STRUCT_FUNCTION (decl);
2137 FOR_EACH_BB_FN (bb, func)
2138 {
2139 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2140 {
2141 gimple stmt = gsi_stmt (gsi);
2142
2143 if (is_gimple_debug (stmt))
2144 continue;
2145
2146 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2147 walk_stmt_load_store_addr_ops (stmt, info,
2148 visit_ref_for_mod_analysis,
2149 visit_ref_for_mod_analysis,
2150 visit_ref_for_mod_analysis);
2151 }
2152 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2153 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2154 visit_ref_for_mod_analysis,
2155 visit_ref_for_mod_analysis,
2156 visit_ref_for_mod_analysis);
2157 }
2158 }
2159
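/* To make the controlled-uses accounting above concrete: a register
   parameter whose default definition is used only as an argument of two
   calls ends up with controlled_uses == 2, whereas any non-call use of it,
   or a parameter that is not a gimple register, yields
   IPA_UNDESCRIBED_USE.  */
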
2160 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2161
2162 static void
2163 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2164 {
2165 int i;
2166
2167 for (i = 0; i < param_count; i++)
2168 {
2169 if (parms_ainfo[i].parm_visited_statements)
2170 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2171 if (parms_ainfo[i].pt_visited_statements)
2172 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2173 }
2174 }
2175
2176 /* Initialize the array describing properties of formal parameters
2177 of NODE, analyze their uses and compute jump functions associated
2178 with actual arguments of calls from within NODE. */
2179
2180 void
2181 ipa_analyze_node (struct cgraph_node *node)
2182 {
2183 struct ipa_node_params *info;
2184 struct param_analysis_info *parms_ainfo;
2185 int param_count;
2186
2187 ipa_check_create_node_params ();
2188 ipa_check_create_edge_args ();
2189 info = IPA_NODE_REF (node);
2190 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2191 ipa_initialize_node_params (node);
2192
2193 param_count = ipa_get_param_count (info);
2194 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2195 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2196
2197 ipa_analyze_params_uses (node, parms_ainfo);
2198 ipa_compute_jump_functions (node, parms_ainfo);
2199
2200 free_parms_ainfo (parms_ainfo, param_count);
2201 pop_cfun ();
2202 }
2203
2204 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2205 attempt a type-based devirtualization. If successful, return the
2206 target function declaration, otherwise return NULL. */
2207
2208 tree
2209 ipa_intraprocedural_devirtualization (gimple call)
2210 {
2211 tree binfo, token, fndecl;
2212 struct ipa_jump_func jfunc;
2213 tree otr = gimple_call_fn (call);
2214
2215 jfunc.type = IPA_JF_UNKNOWN;
2216 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2217 call, obj_type_ref_class (otr));
2218 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2219 return NULL_TREE;
2220 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2221 if (!binfo)
2222 return NULL_TREE;
2223 token = OBJ_TYPE_REF_TOKEN (otr);
2224 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2225 binfo);
2226 #ifdef ENABLE_CHECKING
2227 if (fndecl)
2228 gcc_assert (possible_polymorphic_call_target_p
2229 (otr, cgraph_get_node (fndecl)));
2230 #endif
2231 return fndecl;
2232 }
2233
2234 /* Update the jump function DST when the call graph edge corresponding to SRC
2235 is being inlined, knowing that DST is of type ancestor and SRC of known
2236 type. */
2237
2238 static void
2239 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2240 struct ipa_jump_func *dst)
2241 {
2242 HOST_WIDE_INT combined_offset;
2243 tree combined_type;
2244
2245 if (!ipa_get_jf_ancestor_type_preserved (dst))
2246 {
2247 dst->type = IPA_JF_UNKNOWN;
2248 return;
2249 }
2250
2251 combined_offset = ipa_get_jf_known_type_offset (src)
2252 + ipa_get_jf_ancestor_offset (dst);
2253 combined_type = ipa_get_jf_ancestor_type (dst);
2254
2255 ipa_set_jf_known_type (dst, combined_offset,
2256 ipa_get_jf_known_type_base_type (src),
2257 combined_type);
2258 }
2259
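/* As an example of the combination above (offsets are hypothetical): a
   known-type jump function at offset 32 bits composed with an ancestor jump
   function at offset 64 bits yields a known-type jump function at offset
   96 bits, keeping SRC's base type and taking the ancestor's type as the
   type of the sub-object, provided the ancestor jump function preserves
   the type.  */
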
2260 /* Update the jump functions associated with call graph edge E when the call
2261 graph edge CS is being inlined, assuming that E->caller is already (possibly
2262 indirectly) inlined into CS->callee and that E has not been inlined. */
2263
2264 static void
2265 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2266 struct cgraph_edge *e)
2267 {
2268 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2269 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2270 int count = ipa_get_cs_argument_count (args);
2271 int i;
2272
2273 for (i = 0; i < count; i++)
2274 {
2275 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2276
2277 if (dst->type == IPA_JF_ANCESTOR)
2278 {
2279 struct ipa_jump_func *src;
2280 int dst_fid = dst->value.ancestor.formal_id;
2281
2282 /* Variable number of arguments can cause havoc if we try to access
2283 one that does not exist in the inlined edge. So make sure we
2284 don't. */
2285 if (dst_fid >= ipa_get_cs_argument_count (top))
2286 {
2287 dst->type = IPA_JF_UNKNOWN;
2288 continue;
2289 }
2290
2291 src = ipa_get_ith_jump_func (top, dst_fid);
2292
2293 if (src->agg.items
2294 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2295 {
2296 struct ipa_agg_jf_item *item;
2297 int j;
2298
2299 /* Currently we do not produce clobber aggregate jump functions,
2300 replace with merging when we do. */
2301 gcc_assert (!dst->agg.items);
2302
2303 dst->agg.items = vec_safe_copy (src->agg.items);
2304 dst->agg.by_ref = src->agg.by_ref;
2305 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2306 item->offset -= dst->value.ancestor.offset;
2307 }
2308
2309 if (src->type == IPA_JF_KNOWN_TYPE)
2310 combine_known_type_and_ancestor_jfs (src, dst);
2311 else if (src->type == IPA_JF_PASS_THROUGH
2312 && src->value.pass_through.operation == NOP_EXPR)
2313 {
2314 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2315 dst->value.ancestor.agg_preserved &=
2316 src->value.pass_through.agg_preserved;
2317 dst->value.ancestor.type_preserved &=
2318 src->value.pass_through.type_preserved;
2319 }
2320 else if (src->type == IPA_JF_ANCESTOR)
2321 {
2322 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2323 dst->value.ancestor.offset += src->value.ancestor.offset;
2324 dst->value.ancestor.agg_preserved &=
2325 src->value.ancestor.agg_preserved;
2326 dst->value.ancestor.type_preserved &=
2327 src->value.ancestor.type_preserved;
2328 }
2329 else
2330 dst->type = IPA_JF_UNKNOWN;
2331 }
2332 else if (dst->type == IPA_JF_PASS_THROUGH)
2333 {
2334 struct ipa_jump_func *src;
2335 /* We must check the range due to calls with a variable number of arguments,
2336 and we cannot combine jump functions with operations. */
2337 if (dst->value.pass_through.operation == NOP_EXPR
2338 && (dst->value.pass_through.formal_id
2339 < ipa_get_cs_argument_count (top)))
2340 {
2341 int dst_fid = dst->value.pass_through.formal_id;
2342 src = ipa_get_ith_jump_func (top, dst_fid);
2343 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2344
2345 switch (src->type)
2346 {
2347 case IPA_JF_UNKNOWN:
2348 dst->type = IPA_JF_UNKNOWN;
2349 break;
2350 case IPA_JF_KNOWN_TYPE:
2351 ipa_set_jf_known_type (dst,
2352 ipa_get_jf_known_type_offset (src),
2353 ipa_get_jf_known_type_base_type (src),
2354 ipa_get_jf_known_type_base_type (src));
2355 break;
2356 case IPA_JF_CONST:
2357 ipa_set_jf_cst_copy (dst, src);
2358 break;
2359
2360 case IPA_JF_PASS_THROUGH:
2361 {
2362 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2363 enum tree_code operation;
2364 operation = ipa_get_jf_pass_through_operation (src);
2365
2366 if (operation == NOP_EXPR)
2367 {
2368 bool agg_p, type_p;
2369 agg_p = dst_agg_p
2370 && ipa_get_jf_pass_through_agg_preserved (src);
2371 type_p = ipa_get_jf_pass_through_type_preserved (src)
2372 && ipa_get_jf_pass_through_type_preserved (dst);
2373 ipa_set_jf_simple_pass_through (dst, formal_id,
2374 agg_p, type_p);
2375 }
2376 else
2377 {
2378 tree operand = ipa_get_jf_pass_through_operand (src);
2379 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2380 operation);
2381 }
2382 break;
2383 }
2384 case IPA_JF_ANCESTOR:
2385 {
2386 bool agg_p, type_p;
2387 agg_p = dst_agg_p
2388 && ipa_get_jf_ancestor_agg_preserved (src);
2389 type_p = ipa_get_jf_ancestor_type_preserved (src)
2390 && ipa_get_jf_pass_through_type_preserved (dst);
2391 ipa_set_ancestor_jf (dst,
2392 ipa_get_jf_ancestor_offset (src),
2393 ipa_get_jf_ancestor_type (src),
2394 ipa_get_jf_ancestor_formal_id (src),
2395 agg_p, type_p);
2396 break;
2397 }
2398 default:
2399 gcc_unreachable ();
2400 }
2401
2402 if (src->agg.items
2403 && (dst_agg_p || !src->agg.by_ref))
2404 {
2405 /* Currently we do not produce clobber aggregate jump
2406 functions, replace with merging when we do. */
2407 gcc_assert (!dst->agg.items);
2408
2409 dst->agg.by_ref = src->agg.by_ref;
2410 dst->agg.items = vec_safe_copy (src->agg.items);
2411 }
2412 }
2413 else
2414 dst->type = IPA_JF_UNKNOWN;
2415 }
2416 }
2417 }
2418
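/* A worked example of the ancestor/pass-through composition above, with
   hypothetical formal ids and offsets: if DST on edge E reads "ancestor at
   offset 64 of formal 2" and the inlined edge CS passed its formal 5
   unchanged as argument 2 (a simple NOP_EXPR pass-through), the updated DST
   reads "ancestor at offset 64 of formal 5", with the agg_preserved and
   type_preserved flags of the two functions anded together.  Composing two
   ancestor functions likewise adds their offsets.  */
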
2419 /* If TARGET is an addr_expr of a function declaration, make it the destination
2420 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2421
2422 struct cgraph_edge *
2423 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2424 {
2425 struct cgraph_node *callee;
2426 struct inline_edge_summary *es = inline_edge_summary (ie);
2427 bool unreachable = false;
2428
2429 if (TREE_CODE (target) == ADDR_EXPR)
2430 target = TREE_OPERAND (target, 0);
2431 if (TREE_CODE (target) != FUNCTION_DECL)
2432 {
2433 target = canonicalize_constructor_val (target, NULL);
2434 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2435 {
2436 if (ie->indirect_info->member_ptr)
2437 /* Member pointer call that goes through a VMT lookup. */
2438 return NULL;
2439
2440 if (dump_file)
2441 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2442 " in %s/%i, making it unreachable.\n",
2443 ie->caller->name (), ie->caller->order);
2444 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2445 callee = cgraph_get_create_node (target);
2446 unreachable = true;
2447 }
2448 else
2449 callee = cgraph_get_node (target);
2450 }
2451 else
2452 callee = cgraph_get_node (target);
2453
2454 /* Because may-edges are not explicitly represented and the vtable may be
2455 external, we may create the first reference to the object in the unit. */
2456 if (!callee || callee->global.inlined_to)
2457 {
2458
2459 /* We had better make sure we can refer to it.
2460 In the case of static functions we are out of luck, since we already
2461 removed its body. In the case of public functions we may or may
2462 not introduce the reference. */
2463 if (!canonicalize_constructor_val (target, NULL)
2464 || !TREE_PUBLIC (target))
2465 {
2466 if (dump_file)
2467 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2468 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2469 xstrdup (ie->caller->name ()),
2470 ie->caller->order,
2471 xstrdup (ie->callee->name ()),
2472 ie->callee->order);
2473 return NULL;
2474 }
2475 callee = cgraph_get_create_node (target);
2476 }
2477 ipa_check_create_node_params ();
2478
2479 /* We cannot make edges to inline clones. It is a bug that someone removed
2480 the cgraph node too early. */
2481 gcc_assert (!callee->global.inlined_to);
2482
2483 if (dump_file && !unreachable)
2484 {
2485 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2486 "(%s/%i -> %s/%i), for stmt ",
2487 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2488 xstrdup (ie->caller->name ()),
2489 ie->caller->order,
2490 xstrdup (callee->name ()),
2491 callee->order);
2492 if (ie->call_stmt)
2493 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2494 else
2495 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2496 }
2497 ie = cgraph_make_edge_direct (ie, callee);
2498 es = inline_edge_summary (ie);
2499 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2500 - eni_size_weights.call_cost);
2501 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2502 - eni_time_weights.call_cost);
2503
2504 return ie;
2505 }
2506
2507 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2508 return NULL if there is none. BY_REF specifies whether the value has to
2509 be passed by reference or by value. */
2510
2511 tree
2512 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2513 HOST_WIDE_INT offset, bool by_ref)
2514 {
2515 struct ipa_agg_jf_item *item;
2516 int i;
2517
2518 if (by_ref != agg->by_ref)
2519 return NULL;
2520
2521 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2522 if (item->offset == offset)
2523 {
2524 /* Currently we do not have clobber values, return NULL for them once
2525 we do. */
2526 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2527 return item->value;
2528 }
2529 return NULL;
2530 }
2531
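/* A typical use, as in try_make_edge_direct_simple_call below: when an
   indirect call loads its target from offset OFF of an aggregate passed by
   reference, the constant target, if any, is obtained with

     target = ipa_find_agg_cst_for_param (&jfunc->agg, off, true);

   which only succeeds when the by_ref flags agree and an item with exactly
   that offset was recorded.  */
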
2532 /* Remove a reference to SYMBOL from the list of references of a node given by
2533 reference description RDESC. Return true if the reference has been
2534 successfully found and removed. */
2535
2536 static bool
2537 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2538 {
2539 struct ipa_ref *to_del;
2540 struct cgraph_edge *origin;
2541
2542 origin = rdesc->cs;
2543 if (!origin)
2544 return false;
2545 to_del = ipa_find_reference (origin->caller, symbol,
2546 origin->call_stmt, origin->lto_stmt_uid);
2547 if (!to_del)
2548 return false;
2549
2550 ipa_remove_reference (to_del);
2551 if (dump_file)
2552 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2553 xstrdup (origin->caller->name ()),
2554 origin->caller->order, xstrdup (symbol->name ()));
2555 return true;
2556 }
2557
2558 /* If JFUNC has a reference description with refcount different from
2559 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2560 NULL. JFUNC must be a constant jump function. */
2561
2562 static struct ipa_cst_ref_desc *
2563 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2564 {
2565 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2566 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2567 return rdesc;
2568 else
2569 return NULL;
2570 }
2571
2572 /* If the value of constant jump function JFUNC is an address of a function
2573 declaration, return the associated call graph node. Otherwise return
2574 NULL. */
2575
2576 static cgraph_node *
2577 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2578 {
2579 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2580 tree cst = ipa_get_jf_constant (jfunc);
2581 if (TREE_CODE (cst) != ADDR_EXPR
2582 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2583 return NULL;
2584
2585 return cgraph_get_node (TREE_OPERAND (cst, 0));
2586 }
2587
2588
2589 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2590 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2591 the edge specified in the rdesc. Return false if either the symbol or the
2592 reference could not be found, otherwise return true. */
2593
2594 static bool
2595 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2596 {
2597 struct ipa_cst_ref_desc *rdesc;
2598 if (jfunc->type == IPA_JF_CONST
2599 && (rdesc = jfunc_rdesc_usable (jfunc))
2600 && --rdesc->refcount == 0)
2601 {
2602 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2603 if (!symbol)
2604 return false;
2605
2606 return remove_described_reference (symbol, rdesc);
2607 }
2608 return true;
2609 }
2610
2611 /* Try to find a destination for indirect edge IE that corresponds to a simple
2612 call or a call of a member function pointer and where the destination is a
2613 pointer formal parameter described by jump function JFUNC. If it can be
2614 determined, return the newly direct edge, otherwise return NULL.
2615 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2616
2617 static struct cgraph_edge *
2618 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2619 struct ipa_jump_func *jfunc,
2620 struct ipa_node_params *new_root_info)
2621 {
2622 struct cgraph_edge *cs;
2623 tree target;
2624 bool agg_contents = ie->indirect_info->agg_contents;
2625
2626 if (ie->indirect_info->agg_contents)
2627 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2628 ie->indirect_info->offset,
2629 ie->indirect_info->by_ref);
2630 else
2631 target = ipa_value_from_jfunc (new_root_info, jfunc);
2632 if (!target)
2633 return NULL;
2634 cs = ipa_make_edge_direct_to_target (ie, target);
2635
2636 if (cs && !agg_contents)
2637 {
2638 bool ok;
2639 gcc_checking_assert (cs->callee
2640 && (cs != ie
2641 || jfunc->type != IPA_JF_CONST
2642 || !cgraph_node_for_jfunc (jfunc)
2643 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2644 ok = try_decrement_rdesc_refcount (jfunc);
2645 gcc_checking_assert (ok);
2646 }
2647
2648 return cs;
2649 }
2650
2651 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2652 call based on a formal parameter which is described by jump function JFUNC
2653 and if it can be determined, make it direct and return the direct edge.
2654 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2655 are relative to. */
2656
2657 static struct cgraph_edge *
2658 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2659 struct ipa_jump_func *jfunc,
2660 struct ipa_node_params *new_root_info)
2661 {
2662 tree binfo, target;
2663
2664 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2665
2666 if (!binfo)
2667 return NULL;
2668
2669 if (TREE_CODE (binfo) != TREE_BINFO)
2670 {
2671 binfo = gimple_extract_devirt_binfo_from_cst
2672 (binfo, ie->indirect_info->otr_type);
2673 if (!binfo)
2674 return NULL;
2675 }
2676
2677 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2678 ie->indirect_info->otr_type);
2679 if (binfo)
2680 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2681 binfo);
2682 else
2683 return NULL;
2684
2685 if (target)
2686 {
2687 #ifdef ENABLE_CHECKING
2688 gcc_assert (possible_polymorphic_call_target_p
2689 (ie, cgraph_get_node (target)));
2690 #endif
2691 return ipa_make_edge_direct_to_target (ie, target);
2692 }
2693 else
2694 return NULL;
2695 }
2696
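/* For intuition: when the jump function pins the argument down to a known
   type (or to a constant from which a BINFO can be extracted),
   get_binfo_at_offset descends to the sub-object whose virtual table the
   call uses, and gimple_get_virt_method_for_binfo then looks up the method
   at index otr_token in that table, if it is known.  */
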
2697 /* Update the param called notes associated with NODE when CS is being inlined,
2698 assuming NODE is (potentially indirectly) inlined into CS->callee.
2699 Moreover, if the callee is discovered to be constant, create a new cgraph
2700 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2701 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2702
2703 static bool
2704 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2705 struct cgraph_node *node,
2706 vec<cgraph_edge_p> *new_edges)
2707 {
2708 struct ipa_edge_args *top;
2709 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2710 struct ipa_node_params *new_root_info;
2711 bool res = false;
2712
2713 ipa_check_create_edge_args ();
2714 top = IPA_EDGE_REF (cs);
2715 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2716 ? cs->caller->global.inlined_to
2717 : cs->caller);
2718
2719 for (ie = node->indirect_calls; ie; ie = next_ie)
2720 {
2721 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2722 struct ipa_jump_func *jfunc;
2723 int param_index;
2724
2725 next_ie = ie->next_callee;
2726
2727 if (ici->param_index == -1)
2728 continue;
2729
2730 /* We must check the range due to calls with a variable number of arguments: */
2731 if (ici->param_index >= ipa_get_cs_argument_count (top))
2732 {
2733 ici->param_index = -1;
2734 continue;
2735 }
2736
2737 param_index = ici->param_index;
2738 jfunc = ipa_get_ith_jump_func (top, param_index);
2739
2740 if (!flag_indirect_inlining)
2741 new_direct_edge = NULL;
2742 else if (ici->polymorphic)
2743 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2744 new_root_info);
2745 else
2746 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2747 new_root_info);
2748 /* If speculation was removed, then we need to do nothing. */
2749 if (new_direct_edge && new_direct_edge != ie)
2750 {
2751 new_direct_edge->indirect_inlining_edge = 1;
2752 top = IPA_EDGE_REF (cs);
2753 res = true;
2754 }
2755 else if (new_direct_edge)
2756 {
2757 new_direct_edge->indirect_inlining_edge = 1;
2758 if (new_direct_edge->call_stmt)
2759 new_direct_edge->call_stmt_cannot_inline_p
2760 = !gimple_check_call_matching_types (
2761 new_direct_edge->call_stmt,
2762 new_direct_edge->callee->decl, false);
2763 if (new_edges)
2764 {
2765 new_edges->safe_push (new_direct_edge);
2766 res = true;
2767 }
2768 top = IPA_EDGE_REF (cs);
2769 }
2770 else if (jfunc->type == IPA_JF_PASS_THROUGH
2771 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2772 {
2773 if (ici->agg_contents
2774 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2775 ici->param_index = -1;
2776 else
2777 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2778 }
2779 else if (jfunc->type == IPA_JF_ANCESTOR)
2780 {
2781 if (ici->agg_contents
2782 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2783 ici->param_index = -1;
2784 else
2785 {
2786 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2787 if (ipa_get_jf_ancestor_offset (jfunc))
2788 ici->outer_type = NULL;
2789 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2790 }
2791 }
2792 else
2793 /* Either we can find a destination for this edge now or never. */
2794 ici->param_index = -1;
2795 }
2796
2797 return res;
2798 }
2799
2800 /* Recursively traverse subtree of NODE (including node) made of inlined
2801 cgraph_edges when CS has been inlined and invoke
2802 update_indirect_edges_after_inlining on all nodes and
2803 update_jump_functions_after_inlining on all non-inlined edges that lead out
2804 of this subtree. Newly discovered indirect edges will be added to
2805 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2806 created. */
2807
2808 static bool
2809 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2810 struct cgraph_node *node,
2811 vec<cgraph_edge_p> *new_edges)
2812 {
2813 struct cgraph_edge *e;
2814 bool res;
2815
2816 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2817
2818 for (e = node->callees; e; e = e->next_callee)
2819 if (!e->inline_failed)
2820 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2821 else
2822 update_jump_functions_after_inlining (cs, e);
2823 for (e = node->indirect_calls; e; e = e->next_callee)
2824 update_jump_functions_after_inlining (cs, e);
2825
2826 return res;
2827 }
2828
2829 /* Combine two controlled uses counts as done during inlining. */
2830
2831 static int
2832 combine_controlled_uses_counters (int c, int d)
2833 {
2834 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2835 return IPA_UNDESCRIBED_USE;
2836 else
2837 return c + d - 1;
2838 }
2839
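/* For example, combining c == 3 and d == 2 yields 4: the call site being
   inlined accounts for one of the uses counted in C, and after inlining
   that use disappears and is replaced by the D uses within the inlined
   body, hence c + d - 1.  */
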
2840 /* Propagate the number of controlled uses from CS->callee to the new root of
2841 the tree of inlined nodes. */
2842
2843 static void
2844 propagate_controlled_uses (struct cgraph_edge *cs)
2845 {
2846 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2847 struct cgraph_node *new_root = cs->caller->global.inlined_to
2848 ? cs->caller->global.inlined_to : cs->caller;
2849 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2850 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2851 int count, i;
2852
2853 count = MIN (ipa_get_cs_argument_count (args),
2854 ipa_get_param_count (old_root_info));
2855 for (i = 0; i < count; i++)
2856 {
2857 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2858 struct ipa_cst_ref_desc *rdesc;
2859
2860 if (jf->type == IPA_JF_PASS_THROUGH)
2861 {
2862 int src_idx, c, d;
2863 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2864 c = ipa_get_controlled_uses (new_root_info, src_idx);
2865 d = ipa_get_controlled_uses (old_root_info, i);
2866
2867 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2868 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2869 c = combine_controlled_uses_counters (c, d);
2870 ipa_set_controlled_uses (new_root_info, src_idx, c);
2871 if (c == 0 && new_root_info->ipcp_orig_node)
2872 {
2873 struct cgraph_node *n;
2874 struct ipa_ref *ref;
2875 tree t = new_root_info->known_vals[src_idx];
2876
2877 if (t && TREE_CODE (t) == ADDR_EXPR
2878 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2879 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2880 && (ref = ipa_find_reference (new_root,
2881 n, NULL, 0)))
2882 {
2883 if (dump_file)
2884 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2885 "reference from %s/%i to %s/%i.\n",
2886 xstrdup (new_root->name ()),
2887 new_root->order,
2888 xstrdup (n->name ()), n->order);
2889 ipa_remove_reference (ref);
2890 }
2891 }
2892 }
2893 else if (jf->type == IPA_JF_CONST
2894 && (rdesc = jfunc_rdesc_usable (jf)))
2895 {
2896 int d = ipa_get_controlled_uses (old_root_info, i);
2897 int c = rdesc->refcount;
2898 rdesc->refcount = combine_controlled_uses_counters (c, d);
2899 if (rdesc->refcount == 0)
2900 {
2901 tree cst = ipa_get_jf_constant (jf);
2902 struct cgraph_node *n;
2903 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2904 && TREE_CODE (TREE_OPERAND (cst, 0))
2905 == FUNCTION_DECL);
2906 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2907 if (n)
2908 {
2909 struct cgraph_node *clone;
2910 bool ok;
2911 ok = remove_described_reference (n, rdesc);
2912 gcc_checking_assert (ok);
2913
2914 clone = cs->caller;
2915 while (clone->global.inlined_to
2916 && clone != rdesc->cs->caller
2917 && IPA_NODE_REF (clone)->ipcp_orig_node)
2918 {
2919 struct ipa_ref *ref;
2920 ref = ipa_find_reference (clone,
2921 n, NULL, 0);
2922 if (ref)
2923 {
2924 if (dump_file)
2925 fprintf (dump_file, "ipa-prop: Removing "
2926 "cloning-created reference "
2927 "from %s/%i to %s/%i.\n",
2928 xstrdup (clone->name ()),
2929 clone->order,
2930 xstrdup (n->name ()),
2931 n->order);
2932 ipa_remove_reference (ref);
2933 }
2934 clone = clone->callers->caller;
2935 }
2936 }
2937 }
2938 }
2939 }
2940
2941 for (i = ipa_get_param_count (old_root_info);
2942 i < ipa_get_cs_argument_count (args);
2943 i++)
2944 {
2945 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2946
2947 if (jf->type == IPA_JF_CONST)
2948 {
2949 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2950 if (rdesc)
2951 rdesc->refcount = IPA_UNDESCRIBED_USE;
2952 }
2953 else if (jf->type == IPA_JF_PASS_THROUGH)
2954 ipa_set_controlled_uses (new_root_info,
2955 jf->value.pass_through.formal_id,
2956 IPA_UNDESCRIBED_USE);
2957 }
2958 }
2959
2960 /* Update jump functions and call note functions on inlining the call site CS.
2961 CS is expected to lead to a node already cloned by
2962 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2963 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2964 created. */
2965
2966 bool
2967 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2968 vec<cgraph_edge_p> *new_edges)
2969 {
2970 bool changed;
2971 /* Do nothing if the preparation phase has not been carried out yet
2972 (i.e. during early inlining). */
2973 if (!ipa_node_params_vector.exists ())
2974 return false;
2975 gcc_assert (ipa_edge_args_vector);
2976
2977 propagate_controlled_uses (cs);
2978 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2979
2980 return changed;
2981 }
2982
2983 /* Frees all dynamically allocated structures that the argument info points
2984 to. */
2985
2986 void
2987 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2988 {
2989 vec_free (args->jump_functions);
2990 memset (args, 0, sizeof (*args));
2991 }
2992
2993 /* Free all ipa_edge_args structures. */
2994
2995 void
2996 ipa_free_all_edge_args (void)
2997 {
2998 int i;
2999 struct ipa_edge_args *args;
3000
3001 if (!ipa_edge_args_vector)
3002 return;
3003
3004 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3005 ipa_free_edge_args_substructures (args);
3006
3007 vec_free (ipa_edge_args_vector);
3008 }
3009
3010 /* Frees all dynamically allocated structures that the param info points
3011 to. */
3012
3013 void
3014 ipa_free_node_params_substructures (struct ipa_node_params *info)
3015 {
3016 info->descriptors.release ();
3017 free (info->lattices);
3018 /* Lattice values and their sources are deallocated with their allocation
3019 pool. */
3020 info->known_vals.release ();
3021 memset (info, 0, sizeof (*info));
3022 }
3023
3024 /* Free all ipa_node_params structures. */
3025
3026 void
3027 ipa_free_all_node_params (void)
3028 {
3029 int i;
3030 struct ipa_node_params *info;
3031
3032 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3033 ipa_free_node_params_substructures (info);
3034
3035 ipa_node_params_vector.release ();
3036 }
3037
3038 /* Set the aggregate replacements of NODE to be AGGVALS. */
3039
3040 void
3041 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3042 struct ipa_agg_replacement_value *aggvals)
3043 {
3044 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3045 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3046
3047 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3048 }
3049
3050 /* Hook that is called by cgraph.c when an edge is removed. */
3051
3052 static void
3053 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3054 {
3055 struct ipa_edge_args *args;
3056
3057 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3058 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3059 return;
3060
3061 args = IPA_EDGE_REF (cs);
3062 if (args->jump_functions)
3063 {
3064 struct ipa_jump_func *jf;
3065 int i;
3066 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3067 {
3068 struct ipa_cst_ref_desc *rdesc;
3069 try_decrement_rdesc_refcount (jf);
3070 if (jf->type == IPA_JF_CONST
3071 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3072 && rdesc->cs == cs)
3073 rdesc->cs = NULL;
3074 }
3075 }
3076
3077 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3078 }
3079
3080 /* Hook that is called by cgraph.c when a node is removed. */
3081
3082 static void
3083 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3084 {
3085 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3086 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3087 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3088 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3089 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3090 }
3091
3092 /* Hook that is called by cgraph.c when an edge is duplicated. */
3093
3094 static void
3095 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3096 __attribute__((unused)) void *data)
3097 {
3098 struct ipa_edge_args *old_args, *new_args;
3099 unsigned int i;
3100
3101 ipa_check_create_edge_args ();
3102
3103 old_args = IPA_EDGE_REF (src);
3104 new_args = IPA_EDGE_REF (dst);
3105
3106 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3107
3108 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3109 {
3110 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3111 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3112
3113 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3114
3115 if (src_jf->type == IPA_JF_CONST)
3116 {
3117 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3118
3119 if (!src_rdesc)
3120 dst_jf->value.constant.rdesc = NULL;
3121 else if (src->caller == dst->caller)
3122 {
3123 struct ipa_ref *ref;
3124 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3125 gcc_checking_assert (n);
3126 ref = ipa_find_reference (src->caller, n,
3127 src->call_stmt, src->lto_stmt_uid);
3128 gcc_checking_assert (ref);
3129 ipa_clone_ref (ref, dst->caller, ref->stmt);
3130
3131 gcc_checking_assert (ipa_refdesc_pool);
3132 struct ipa_cst_ref_desc *dst_rdesc
3133 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3134 dst_rdesc->cs = dst;
3135 dst_rdesc->refcount = src_rdesc->refcount;
3136 dst_rdesc->next_duplicate = NULL;
3137 dst_jf->value.constant.rdesc = dst_rdesc;
3138 }
3139 else if (src_rdesc->cs == src)
3140 {
3141 struct ipa_cst_ref_desc *dst_rdesc;
3142 gcc_checking_assert (ipa_refdesc_pool);
3143 dst_rdesc
3144 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3145 dst_rdesc->cs = dst;
3146 dst_rdesc->refcount = src_rdesc->refcount;
3147 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3148 src_rdesc->next_duplicate = dst_rdesc;
3149 dst_jf->value.constant.rdesc = dst_rdesc;
3150 }
3151 else
3152 {
3153 struct ipa_cst_ref_desc *dst_rdesc;
3154 /* This can happen during inlining, when a JFUNC can refer to a
3155 reference taken in a function up in the tree of inline clones.
3156 We need to find the duplicate that refers to our tree of
3157 inline clones. */
3158
3159 gcc_assert (dst->caller->global.inlined_to);
3160 for (dst_rdesc = src_rdesc->next_duplicate;
3161 dst_rdesc;
3162 dst_rdesc = dst_rdesc->next_duplicate)
3163 {
3164 struct cgraph_node *top;
3165 top = dst_rdesc->cs->caller->global.inlined_to
3166 ? dst_rdesc->cs->caller->global.inlined_to
3167 : dst_rdesc->cs->caller;
3168 if (dst->caller->global.inlined_to == top)
3169 break;
3170 }
3171 gcc_assert (dst_rdesc);
3172 dst_jf->value.constant.rdesc = dst_rdesc;
3173 }
3174 }
3175 }
3176 }
3177
3178 /* Hook that is called by cgraph.c when a node is duplicated. */
3179
3180 static void
3181 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3182 ATTRIBUTE_UNUSED void *data)
3183 {
3184 struct ipa_node_params *old_info, *new_info;
3185 struct ipa_agg_replacement_value *old_av, *new_av;
3186
3187 ipa_check_create_node_params ();
3188 old_info = IPA_NODE_REF (src);
3189 new_info = IPA_NODE_REF (dst);
3190
3191 new_info->descriptors = old_info->descriptors.copy ();
3192 new_info->lattices = NULL;
3193 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3194
3195 new_info->uses_analysis_done = old_info->uses_analysis_done;
3196 new_info->node_enqueued = old_info->node_enqueued;
3197
3198 old_av = ipa_get_agg_replacements_for_node (src);
3199 if (!old_av)
3200 return;
3201
3202 new_av = NULL;
3203 while (old_av)
3204 {
3205 struct ipa_agg_replacement_value *v;
3206
3207 v = ggc_alloc_ipa_agg_replacement_value ();
3208 memcpy (v, old_av, sizeof (*v));
3209 v->next = new_av;
3210 new_av = v;
3211 old_av = old_av->next;
3212 }
3213 ipa_set_node_agg_value_chain (dst, new_av);
3214 }
3215
3216
3217 /* Analyze a function newly added to the callgraph. */
3218
3219 static void
3220 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3221 {
3222 if (cgraph_function_with_gimple_body_p (node))
3223 ipa_analyze_node (node);
3224 }
3225
3226 /* Register our cgraph hooks if they are not already there. */
3227
3228 void
3229 ipa_register_cgraph_hooks (void)
3230 {
3231 if (!edge_removal_hook_holder)
3232 edge_removal_hook_holder =
3233 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3234 if (!node_removal_hook_holder)
3235 node_removal_hook_holder =
3236 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3237 if (!edge_duplication_hook_holder)
3238 edge_duplication_hook_holder =
3239 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3240 if (!node_duplication_hook_holder)
3241 node_duplication_hook_holder =
3242 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3243 function_insertion_hook_holder =
3244 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3245 }
3246
3247 /* Unregister our cgraph hooks. */
3248
3249 static void
3250 ipa_unregister_cgraph_hooks (void)
3251 {
3252 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3253 edge_removal_hook_holder = NULL;
3254 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3255 node_removal_hook_holder = NULL;
3256 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3257 edge_duplication_hook_holder = NULL;
3258 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3259 node_duplication_hook_holder = NULL;
3260 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3261 function_insertion_hook_holder = NULL;
3262 }
3263
3264 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3265 longer needed after ipa-cp. */
3266
3267 void
3268 ipa_free_all_structures_after_ipa_cp (void)
3269 {
3270 if (!optimize)
3271 {
3272 ipa_free_all_edge_args ();
3273 ipa_free_all_node_params ();
3274 free_alloc_pool (ipcp_sources_pool);
3275 free_alloc_pool (ipcp_values_pool);
3276 free_alloc_pool (ipcp_agg_lattice_pool);
3277 ipa_unregister_cgraph_hooks ();
3278 if (ipa_refdesc_pool)
3279 free_alloc_pool (ipa_refdesc_pool);
3280 }
3281 }
3282
3283 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3284 longer needed after indirect inlining. */
3285
3286 void
3287 ipa_free_all_structures_after_iinln (void)
3288 {
3289 ipa_free_all_edge_args ();
3290 ipa_free_all_node_params ();
3291 ipa_unregister_cgraph_hooks ();
3292 if (ipcp_sources_pool)
3293 free_alloc_pool (ipcp_sources_pool);
3294 if (ipcp_values_pool)
3295 free_alloc_pool (ipcp_values_pool);
3296 if (ipcp_agg_lattice_pool)
3297 free_alloc_pool (ipcp_agg_lattice_pool);
3298 if (ipa_refdesc_pool)
3299 free_alloc_pool (ipa_refdesc_pool);
3300 }
3301
3302 /* Print ipa_tree_map data structures of function NODE in the
3303 callgraph to F. */
3304
3305 void
3306 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3307 {
3308 int i, count;
3309 struct ipa_node_params *info;
3310
3311 if (!node->definition)
3312 return;
3313 info = IPA_NODE_REF (node);
3314 fprintf (f, " function %s/%i parameter descriptors:\n",
3315 node->name (), node->order);
3316 count = ipa_get_param_count (info);
3317 for (i = 0; i < count; i++)
3318 {
3319 int c;
3320
3321 ipa_dump_param (f, info, i);
3322 if (ipa_is_param_used (info, i))
3323 fprintf (f, " used");
3324 c = ipa_get_controlled_uses (info, i);
3325 if (c == IPA_UNDESCRIBED_USE)
3326 fprintf (f, " undescribed_use");
3327 else
3328 fprintf (f, " controlled_uses=%i", c);
3329 fprintf (f, "\n");
3330 }
3331 }
3332
3333 /* Print ipa_tree_map data structures of all functions in the
3334 callgraph to F. */
3335
3336 void
3337 ipa_print_all_params (FILE * f)
3338 {
3339 struct cgraph_node *node;
3340
3341 fprintf (f, "\nFunction parameters:\n");
3342 FOR_EACH_FUNCTION (node)
3343 ipa_print_node_params (f, node);
3344 }
3345
3346 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
3347
3348 vec<tree>
3349 ipa_get_vector_of_formal_parms (tree fndecl)
3350 {
3351 vec<tree> args;
3352 int count;
3353 tree parm;
3354
3355 gcc_assert (!flag_wpa);
3356 count = count_formal_params (fndecl);
3357 args.create (count);
3358 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3359 args.quick_push (parm);
3360
3361 return args;
3362 }
3363
3364 /* Return a heap-allocated vector containing the types of formal parameters
3365 of function type FNTYPE. */
3366
3367 vec<tree>
3368 ipa_get_vector_of_formal_parm_types (tree fntype)
3369 {
3370 vec<tree> types;
3371 int count = 0;
3372 tree t;
3373
3374 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3375 count++;
3376
3377 types.create (count);
3378 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3379 types.quick_push (TREE_VALUE (t));
3380
3381 return types;
3382 }
3383
3384 /* Modify the function declaration FNDECL and its type according to the plan in
3385 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3386 to reflect the actual parameters being modified which are determined by the
3387 base_index field. */
3388
3389 void
3390 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3391 {
3392 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3393 tree orig_type = TREE_TYPE (fndecl);
3394 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3395
3396 /* The following test is an ugly hack; some functions simply don't have any
3397 arguments in their type. This is probably a bug but well... */
3398 bool care_for_types = (old_arg_types != NULL_TREE);
3399 bool last_parm_void;
3400 vec<tree> otypes;
3401 if (care_for_types)
3402 {
3403 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3404 == void_type_node);
3405 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3406 if (last_parm_void)
3407 gcc_assert (oparms.length () + 1 == otypes.length ());
3408 else
3409 gcc_assert (oparms.length () == otypes.length ());
3410 }
3411 else
3412 {
3413 last_parm_void = false;
3414 otypes.create (0);
3415 }
3416
3417 int len = adjustments.length ();
3418 tree *link = &DECL_ARGUMENTS (fndecl);
3419 tree new_arg_types = NULL;
3420 for (int i = 0; i < len; i++)
3421 {
3422 struct ipa_parm_adjustment *adj;
3423 gcc_assert (link);
3424
3425 adj = &adjustments[i];
3426 tree parm;
3427 if (adj->op == IPA_PARM_OP_NEW)
3428 parm = NULL;
3429 else
3430 parm = oparms[adj->base_index];
3431 adj->base = parm;
3432
3433 if (adj->op == IPA_PARM_OP_COPY)
3434 {
3435 if (care_for_types)
3436 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3437 new_arg_types);
3438 *link = parm;
3439 link = &DECL_CHAIN (parm);
3440 }
3441 else if (adj->op != IPA_PARM_OP_REMOVE)
3442 {
3443 tree new_parm;
3444 tree ptype;
3445
3446 if (adj->by_ref)
3447 ptype = build_pointer_type (adj->type);
3448 else
3449 {
3450 ptype = adj->type;
3451 if (is_gimple_reg_type (ptype))
3452 {
3453 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3454 if (TYPE_ALIGN (ptype) < malign)
3455 ptype = build_aligned_type (ptype, malign);
3456 }
3457 }
3458
3459 if (care_for_types)
3460 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3461
3462 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3463 ptype);
3464 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3465 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3466 DECL_ARTIFICIAL (new_parm) = 1;
3467 DECL_ARG_TYPE (new_parm) = ptype;
3468 DECL_CONTEXT (new_parm) = fndecl;
3469 TREE_USED (new_parm) = 1;
3470 DECL_IGNORED_P (new_parm) = 1;
3471 layout_decl (new_parm, 0);
3472
3473 if (adj->op == IPA_PARM_OP_NEW)
3474 adj->base = NULL;
3475 else
3476 adj->base = parm;
3477 adj->new_decl = new_parm;
3478
3479 *link = new_parm;
3480 link = &DECL_CHAIN (new_parm);
3481 }
3482 }
3483
3484 *link = NULL_TREE;
3485
3486 tree new_reversed = NULL;
3487 if (care_for_types)
3488 {
3489 new_reversed = nreverse (new_arg_types);
3490 if (last_parm_void)
3491 {
3492 if (new_reversed)
3493 TREE_CHAIN (new_arg_types) = void_list_node;
3494 else
3495 new_reversed = void_list_node;
3496 }
3497 }
3498
3499 /* Use copy_node to preserve as much as possible from the original type
3500 (debug info, attribute lists etc.). The exception is that
3501 METHOD_TYPEs must have a THIS argument; when we are asked
3502 to remove it, we need to build a new FUNCTION_TYPE
3503 instead. */
3504 tree new_type = NULL;
3505 if (TREE_CODE (orig_type) != METHOD_TYPE
3506 || (adjustments[0].op == IPA_PARM_OP_COPY
3507 && adjustments[0].base_index == 0))
3508 {
3509 new_type = build_distinct_type_copy (orig_type);
3510 TYPE_ARG_TYPES (new_type) = new_reversed;
3511 }
3512 else
3513 {
3514 new_type
3515 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3516 new_reversed));
3517 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3518 DECL_VINDEX (fndecl) = NULL_TREE;
3519 }
3520
3521 /* When signature changes, we need to clear builtin info. */
3522 if (DECL_BUILT_IN (fndecl))
3523 {
3524 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3525 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3526 }
3527
3528 /* This is a new type, not a copy of an old type. Need to reassociate
3529 variants. We can handle everything except the main variant lazily. */
3530 tree t = TYPE_MAIN_VARIANT (orig_type);
3531 if (orig_type != t)
3532 {
3533 TYPE_MAIN_VARIANT (new_type) = t;
3534 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3535 TYPE_NEXT_VARIANT (t) = new_type;
3536 }
3537 else
3538 {
3539 TYPE_MAIN_VARIANT (new_type) = new_type;
3540 TYPE_NEXT_VARIANT (new_type) = NULL;
3541 }
3542
3543 TREE_TYPE (fndecl) = new_type;
3544 DECL_VIRTUAL_P (fndecl) = 0;
3545 otypes.release ();
3546 oparms.release ();
3547 }
3548
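/* A hypothetical plan, to make the mechanics above concrete: rewriting

     int foo (struct S *p, int unused);

   into

     int foo (int val);

   where val is the one field of *p that foo actually reads, would be
   described by two adjustments: one with base_index 0, the field's type and
   offset, and an op that is neither IPA_PARM_OP_COPY nor IPA_PARM_OP_REMOVE,
   for which the loop above synthesizes a fresh PARM_DECL (named with the
   "SYNTH" prefix unless arg_prefix overrides it); and one with base_index 1
   and op IPA_PARM_OP_REMOVE, which contributes nothing to the new argument
   list.  */
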
3549 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3550 If this is a directly recursive call, CS must be NULL. Otherwise it must
3551 contain the corresponding call graph edge. */
3552
3553 void
3554 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3555 ipa_parm_adjustment_vec adjustments)
3556 {
3557 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3558 vec<tree> vargs;
3559 vec<tree, va_gc> **debug_args = NULL;
3560 gimple new_stmt;
3561 gimple_stmt_iterator gsi, prev_gsi;
3562 tree callee_decl;
3563 int i, len;
3564
3565 len = adjustments.length ();
3566 vargs.create (len);
3567 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3568 ipa_remove_stmt_references (current_node, stmt);
3569
3570 gsi = gsi_for_stmt (stmt);
3571 prev_gsi = gsi;
3572 gsi_prev (&prev_gsi);
3573 for (i = 0; i < len; i++)
3574 {
3575 struct ipa_parm_adjustment *adj;
3576
3577 adj = &adjustments[i];
3578
3579 if (adj->op == IPA_PARM_OP_COPY)
3580 {
3581 tree arg = gimple_call_arg (stmt, adj->base_index);
3582
3583 vargs.quick_push (arg);
3584 }
3585 else if (adj->op != IPA_PARM_OP_REMOVE)
3586 {
3587 tree expr, base, off;
3588 location_t loc;
3589 unsigned int deref_align = 0;
3590 bool deref_base = false;
3591
3592 /* We create a new parameter out of the value of the old one; we can
3593 do the following kinds of transformations:
3594
3595 - A scalar passed by reference is converted to a scalar passed by
3596 value. (adj->by_ref is false and the type of the original
3597 actual argument is a pointer to a scalar).
3598
3599 - A part of an aggregate is passed instead of the whole aggregate.
3600 The part can be passed either by value or by reference, this is
3601 determined by value of adj->by_ref. Moreover, the code below
3602 handles both situations when the original aggregate is passed by
3603 value (its type is not a pointer) and when it is passed by
3604 reference (it is a pointer to an aggregate).
3605
3606 When the new argument is passed by reference (adj->by_ref is true)
3607 it must be a part of an aggregate and therefore we form it by
3608 simply taking the address of a reference inside the original
3609 aggregate. */
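	  /* As a hypothetical example, if only the field S.F of a struct S
	     passed as "foo (&s)" is used by the callee, the call may become
	     "foo (s.f)" when the new parameter is passed by value, or
	     "foo (&s.f)" when it is passed by reference.  */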

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
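	      /* Fold the constant offset into the misalignment (OFF is
		 sign-extended to its type precision first).  If the access
		 then turns out to be misaligned, the guaranteed alignment
		 is only the lowest set bit of the misalignment, which is
		 what (misalign & -misalign) extracts.  */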
	      misalign += (tree_to_double_int (off)
			   .sext (TYPE_PRECISION (TREE_TYPE (off))).low
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	    }

	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
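	  /* DEBUG_ARGS is a flat vector of (origin parameter, debug decl)
	     pairs, hence the iteration below advances by two.  */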
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
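  /* Re-record references in the new call statement and in any statements
     that were inserted before it while gimplifying the new arguments; walk
     backwards until we reach the statement that originally preceded the
     call, or the start of the basic block.  */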
  do
    {
      ipa_record_stmt_references (current_node, gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
	 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) != gsi_stmt (prev_gsi)));

  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}

/* If the expression *EXPR should be replaced by a reduction of a parameter,
   do so.  ADJUSTMENTS is a vector of adjustments.  CONVERT specifies whether
   the function should care about type incompatibility between the current
   and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller.  Return true iff the expression was
   modified.  */
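/* For instance, if a parameter was reduced to a scalar copy of one of its
   fields, an occurrence of the original expression in the function body is
   rewritten to refer to the replacement declaration (dereferenced first
   when the new parameter is passed by reference).  */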

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}

/* If T is an SSA_NAME, return NULL if it is not a default def, or its base
   variable if it is.  If IGNORE_DEFAULT_DEF is true, the base variable is
   always returned, regardless of whether it is a default def.  Return T if
   it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Given an expression, return the adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry was
   found, return NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a default
   def; otherwise bail out on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the expression
   provided is a component reference.  ADJUSTMENTS is the adjustments
   vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
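  /* Strip wrappers that merely reinterpret bits of the underlying object;
     replacing what is below them then requires an explicit conversion.  */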
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}

/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}


/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments
   in INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME: Create nonlocal value too.  */

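      /* Offsets compose additively when both adjustments reduce the
	 parameter; a COPY on either side simply keeps the other side's
	 offset.  For instance, reducing to offset 32 in INNER and then to
	 offset 8 of that in OUTER selects offset 40 of the original.  */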
      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}

/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
   way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, " ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}

/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, " Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}

/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
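  /* The type tag written above selects the layout of the payload that
     follows; ipa_read_jump_function must mirror this order exactly.  */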
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}

/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree base_type = stream_read_tree (ib, data_in);
	tree component_type = stream_read_tree (ib, data_in);

	ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
	break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  bool type_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
					  type_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree type = stream_read_tree (ib, data_in);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool type_preserved = bp_unpack_value (&bp, 1);

	ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
			     type_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->maybe_in_construction, 1);
  bp_pack_value (&bp, ii->maybe_derived_type, 1);
  streamer_write_bitpack (&bp);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      stream_write_tree (ob, ii->outer_type, true);
    }
}

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in ATTRIBUTE_UNUSED,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->maybe_in_construction = bp_unpack_value (&bp, 1);
  ii->maybe_derived_type = bp_unpack_value (&bp, 1);
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->outer_type = stream_read_tree (ib, data_in);
    }
}

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				    data_in);
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}

/* Write jump functions for nodes in the current partition.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_vector.exists ())
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read the jump functions section in file FILE_DATA of length LEN with data
   DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}

/* After merging units, we can get a mismatch in argument counts.  Also, decl
   merging might have rendered parameter lists obsolete.  Also compute
   called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}

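/* Stream out the aggregate value replacement chain for NODE to OB, mirroring
   read_agg_replacement_chain below.  */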
void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }
}

/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_agg_replacement_chain (struct lto_input_block *ib,
			    struct cgraph_node *node,
			    struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc_ipa_agg_replacement_value ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);
}

/* Write all aggregate replacements for nodes in the current partition.  */

void
ipa_prop_write_all_agg_replacement (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_agg_replacements)
    return;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	write_agg_replacement_chain (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->definition);
      read_agg_replacement_chain (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP aggregate replacements.  */

void
ipa_prop_read_all_agg_replacement (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped
   in NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

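  /* Build a map from pre-skip parameter indices to post-skip ones: skipped
     parameters map to -1 and the remaining ones shift down by the number of
     skipped parameters preceding them.  For instance, with parameter 1
     skipped, indices 0, 1 and 2 map to 0, -1 and 1 respectively.  */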
  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}


/* Function body transformation phase.  */
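/* For example (hypothetical), if IPA-CP has determined that the memory
   pointed to by parameter P always holds the constant 7 at offset 0, a load
   such as "x = p->f;" in the clone's body is rewritten below to "x = 7;".  */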

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct param_analysis_info *parms_ainfo;
  struct ipa_agg_replacement_value *aggval;
  gimple_stmt_iterator gsi;
  basic_block bb;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	struct ipa_agg_replacement_value *v;
	gimple stmt = gsi_stmt (gsi);
	tree rhs, val, t;
	HOST_WIDE_INT offset, size;
	int index;
	bool by_ref, vce;

	if (!gimple_assign_load_p (stmt))
	  continue;
	rhs = gimple_assign_rhs1 (stmt);
	if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	  continue;

	vce = false;
	t = rhs;
	while (handled_component_p (t))
	  {
	    /* V_C_E can do things like convert an array of integers to one
	       bigger integer and similar things we do not handle below.  */
	    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	      {
		vce = true;
		break;
	      }
	    t = TREE_OPERAND (t, 0);
	  }
	if (vce)
	  continue;

	if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
				       rhs, &index, &offset, &size, &by_ref))
	  continue;
	for (v = aggval; v; v = v->next)
	  if (v->index == index
	      && v->offset == offset)
	    break;
	if (!v
	    || v->by_ref != by_ref
	    || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	  continue;

	gcc_checking_assert (is_gimple_ip_invariant (v->value));
	if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	  {
	    if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	      val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	    else if (TYPE_SIZE (TREE_TYPE (rhs))
		     == TYPE_SIZE (TREE_TYPE (v->value)))
	      val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	    else
	      {
		if (dump_file)
		  {
		    fprintf (dump_file, " const ");
		    print_generic_expr (dump_file, v->value, 0);
		    fprintf (dump_file, " can't be converted to type of ");
		    print_generic_expr (dump_file, rhs, 0);
		    fprintf (dump_file, "\n");
		  }
		continue;
	      }
	  }
	else
	  val = v->value;

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "Modifying stmt:\n ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	  }
	gimple_assign_set_rhs_from_tree (&gsi, val);
	update_stmt (stmt);

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "into:\n ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	    fprintf (dump_file, "\n");
	  }

	something_changed = true;
	if (maybe_clean_eh_stmt (stmt)
	    && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	  cfg_changed = true;
      }

  (*ipa_node_agg_replacements)[node->uid] = NULL;
  free_parms_ainfo (parms_ainfo, param_count);
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}