re PR tree-optimization/53787 (Possible IPA-SRA / IPA-CP improvement)
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60 #include "stringpool.h"
61 #include "tree-ssanames.h"
62 #include "dbgcnt.h"
63 #include "domwalk.h"
64
65 /* Intermediate information that we get from alias analysis about a particular
66 parameter in a particular basic_block. When a parameter or the memory it
67 references is marked modified, we use that information in all dominated
68 blocks without consulting the alias analysis oracle.  */
69
70 struct param_aa_status
71 {
72 /* Set when this structure contains meaningful information. If not, the
73 structure describing a dominating BB should be used instead. */
74 bool valid;
75
76 /* Whether we have seen something which might have modified the data in
77 question. PARM is for the parameter itself, REF is for data it points to
78 but using the alias type of individual accesses and PT is the same thing
79 but for computing aggregate pass-through functions using a very inclusive
80 ao_ref. */
81 bool parm_modified, ref_modified, pt_modified;
82 };
83
84 /* Information related to a given BB that is used only when looking at function
85 body. */
86
87 struct ipa_bb_info
88 {
89 /* Call graph edges going out of this BB. */
90 vec<cgraph_edge_p> cg_edges;
91 /* Alias analysis statuses of each formal parameter at this bb. */
92 vec<param_aa_status> param_aa_statuses;
93 };
94
95 /* Structure with global information that is only used when looking at function
96 body. */
97
98 struct func_body_info
99 {
100 /* The node that is being analyzed. */
101 cgraph_node *node;
102
103 /* Its info. */
104 struct ipa_node_params *info;
105
106 /* Information about individual BBs. */
107 vec<ipa_bb_info> bb_infos;
108
109 /* Number of parameters. */
110 int param_count;
111
112 /* Number of statements walked so far by the alias oracle when analyzing this function.  */
113 unsigned int aa_walked;
114 };
115
116 /* Vector where the parameter infos are actually stored. */
117 vec<ipa_node_params> ipa_node_params_vector;
118 /* Vector of known aggregate values in cloned nodes. */
119 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
120 /* Vector where the edge argument infos are actually stored. */
121 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
122
123 /* Holders of ipa cgraph hooks: */
124 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
125 static struct cgraph_node_hook_list *node_removal_hook_holder;
126 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
127 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
128 static struct cgraph_node_hook_list *function_insertion_hook_holder;
129
130 /* Description of a reference to an IPA constant. */
131 struct ipa_cst_ref_desc
132 {
133 /* Edge that corresponds to the statement which took the reference. */
134 struct cgraph_edge *cs;
135 /* Linked list of duplicates created when call graph edges are cloned. */
136 struct ipa_cst_ref_desc *next_duplicate;
137 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
138 is out of control. */
139 int refcount;
140 };
141
142 /* Allocation pool for reference descriptions. */
143
144 static alloc_pool ipa_refdesc_pool;
145
146 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
147 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
148
149 static bool
150 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
151 {
152 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
153 struct cl_optimization *os;
154
155 if (!fs_opts)
156 return false;
157 os = TREE_OPTIMIZATION (fs_opts);
158 return !os->x_optimize || !os->x_flag_ipa_cp;
159 }
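
/* For example (a hypothetical sketch), a function compiled with
   per-function options such as

     int __attribute__ ((optimize ("O0")))
     frozen_add (int x)
     {
       return x + 1;
     }

   carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION with x_optimize equal to
   zero, so the predicate above returns true and the function is not
   analyzed for IPA-CP.  */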
160
161 /* Return index of the formal parameter whose tree is PTREE among
162 DESCRIPTORS, or -1 if it is not found. */
163
164 static int
165 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
166 {
167 int i, count;
168
169 count = descriptors.length ();
170 for (i = 0; i < count; i++)
171 if (descriptors[i].decl == ptree)
172 return i;
173
174 return -1;
175 }
176
177 /* Return index of the formal whose tree is PTREE in function which corresponds
178 to INFO. */
179
180 int
181 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
182 {
183 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
184 }
185
186 /* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
187 NODE. */
188
189 static void
190 ipa_populate_param_decls (struct cgraph_node *node,
191 vec<ipa_param_descriptor> &descriptors)
192 {
193 tree fndecl;
194 tree fnargs;
195 tree parm;
196 int param_num;
197
198 fndecl = node->decl;
199 gcc_assert (gimple_has_body_p (fndecl));
200 fnargs = DECL_ARGUMENTS (fndecl);
201 param_num = 0;
202 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
203 {
204 descriptors[param_num].decl = parm;
205 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
206 param_num++;
207 }
208 }
209
210 /* Return how many formal parameters FNDECL has. */
211
212 static inline int
213 count_formal_params (tree fndecl)
214 {
215 tree parm;
216 int count = 0;
217 gcc_assert (gimple_has_body_p (fndecl));
218
219 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
220 count++;
221
222 return count;
223 }
224
225 /* Dump the declaration and index of the I-th formal parameter of the
226 function corresponding to INFO to FILE.  The output has the form
227 "param #I <decl>". */
228
229 void
230 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
231 {
232 fprintf (file, "param #%i", i);
233 if (info->descriptors[i].decl)
234 {
235 fprintf (file, " ");
236 print_generic_expr (file, info->descriptors[i].decl, 0);
237 }
238 }
239
240 /* Initialize the ipa_node_params structure associated with NODE
241 to hold PARAM_COUNT parameters. */
242
243 void
244 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
245 {
246 struct ipa_node_params *info = IPA_NODE_REF (node);
247
248 if (!info->descriptors.exists () && param_count)
249 info->descriptors.safe_grow_cleared (param_count);
250 }
251
252 /* Initialize the ipa_node_params structure associated with NODE by counting
253 the function parameters, creating the descriptors and populating their
254 param_decls. */
255
256 void
257 ipa_initialize_node_params (struct cgraph_node *node)
258 {
259 struct ipa_node_params *info = IPA_NODE_REF (node);
260
261 if (!info->descriptors.exists ())
262 {
263 ipa_alloc_node_params (node, count_formal_params (node->decl));
264 ipa_populate_param_decls (node, info->descriptors);
265 }
266 }
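
/* A minimal usage sketch (hypothetical caller code): once the descriptors
   exist, parameters can be looked up by their PARM_DECL:

     ipa_initialize_node_params (node);
     struct ipa_node_params *info = IPA_NODE_REF (node);
     int idx = ipa_get_param_decl_index (info, parm_decl);

   where parm_decl is assumed to be one of the DECL_ARGUMENTS of node->decl;
   idx is -1 when the declaration is not found.  */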
267
268 /* Print the jump functions associated with call graph edge CS to file F. */
269
270 static void
271 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
272 {
273 int i, count;
274
275 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
276 for (i = 0; i < count; i++)
277 {
278 struct ipa_jump_func *jump_func;
279 enum jump_func_type type;
280
281 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
282 type = jump_func->type;
283
284 fprintf (f, " param %d: ", i);
285 if (type == IPA_JF_UNKNOWN)
286 fprintf (f, "UNKNOWN\n");
287 else if (type == IPA_JF_KNOWN_TYPE)
288 {
289 fprintf (f, "KNOWN TYPE: base ");
290 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
291 fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
292 jump_func->value.known_type.offset);
293 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
294 fprintf (f, "\n");
295 }
296 else if (type == IPA_JF_CONST)
297 {
298 tree val = jump_func->value.constant.value;
299 fprintf (f, "CONST: ");
300 print_generic_expr (f, val, 0);
301 if (TREE_CODE (val) == ADDR_EXPR
302 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
303 {
304 fprintf (f, " -> ");
305 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
306 0);
307 }
308 fprintf (f, "\n");
309 }
310 else if (type == IPA_JF_PASS_THROUGH)
311 {
312 fprintf (f, "PASS THROUGH: ");
313 fprintf (f, "%d, op %s",
314 jump_func->value.pass_through.formal_id,
315 get_tree_code_name (jump_func->value.pass_through.operation));
316 if (jump_func->value.pass_through.operation != NOP_EXPR)
317 {
318 fprintf (f, " ");
319 print_generic_expr (f,
320 jump_func->value.pass_through.operand, 0);
321 }
322 if (jump_func->value.pass_through.agg_preserved)
323 fprintf (f, ", agg_preserved");
324 if (jump_func->value.pass_through.type_preserved)
325 fprintf (f, ", type_preserved");
326 fprintf (f, "\n");
327 }
328 else if (type == IPA_JF_ANCESTOR)
329 {
330 fprintf (f, "ANCESTOR: ");
331 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
332 jump_func->value.ancestor.formal_id,
333 jump_func->value.ancestor.offset);
334 print_generic_expr (f, jump_func->value.ancestor.type, 0);
335 if (jump_func->value.ancestor.agg_preserved)
336 fprintf (f, ", agg_preserved");
337 if (jump_func->value.ancestor.type_preserved)
338 fprintf (f, ", type_preserved");
339 fprintf (f, "\n");
340 }
341
342 if (jump_func->agg.items)
343 {
344 struct ipa_agg_jf_item *item;
345 int j;
346
347 fprintf (f, " Aggregate passed by %s:\n",
348 jump_func->agg.by_ref ? "reference" : "value");
349 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
350 {
351 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
352 item->offset);
353 if (TYPE_P (item->value))
354 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
355 tree_to_uhwi (TYPE_SIZE (item->value)));
356 else
357 {
358 fprintf (f, "cst: ");
359 print_generic_expr (f, item->value, 0);
360 }
361 fprintf (f, "\n");
362 }
363 }
364 }
365 }
366
367
368 /* Print the jump functions of all arguments on all call graph edges going from
369 NODE to file F. */
370
371 void
372 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
373 {
374 struct cgraph_edge *cs;
375
376 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
377 node->order);
378 for (cs = node->callees; cs; cs = cs->next_callee)
379 {
380 if (!ipa_edge_args_info_available_for_edge_p (cs))
381 continue;
382
383 fprintf (f, " callsite %s/%i -> %s/%i : \n",
384 xstrdup (node->name ()), node->order,
385 xstrdup (cs->callee->name ()),
386 cs->callee->order);
387 ipa_print_node_jump_functions_for_edge (f, cs);
388 }
389
390 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
391 {
392 struct cgraph_indirect_call_info *ii;
393 if (!ipa_edge_args_info_available_for_edge_p (cs))
394 continue;
395
396 ii = cs->indirect_info;
397 if (ii->agg_contents)
398 fprintf (f, " indirect %s callsite, calling param %i, "
399 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
400 ii->member_ptr ? "member ptr" : "aggregate",
401 ii->param_index, ii->offset,
402 ii->by_ref ? "by reference" : "by value");
403 else
404 fprintf (f, " indirect %s callsite, calling param %i, "
405 "offset " HOST_WIDE_INT_PRINT_DEC,
406 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
407 ii->offset);
408
409 if (cs->call_stmt)
410 {
411 fprintf (f, ", for stmt ");
412 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
413 }
414 else
415 fprintf (f, "\n");
416 ipa_print_node_jump_functions_for_edge (f, cs);
417 }
418 }
419
420 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
421
422 void
423 ipa_print_all_jump_functions (FILE *f)
424 {
425 struct cgraph_node *node;
426
427 fprintf (f, "\nJump functions:\n");
428 FOR_EACH_FUNCTION (node)
429 {
430 ipa_print_node_jump_functions (f, node);
431 }
432 }
433
434 /* Set JFUNC to be a known type jump function. */
435
436 static void
437 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
438 tree base_type, tree component_type)
439 {
440 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
441 && TYPE_BINFO (component_type));
442 if (!flag_devirtualize)
443 return;
444 gcc_assert (BINFO_VTABLE (TYPE_BINFO (component_type)));
445 jfunc->type = IPA_JF_KNOWN_TYPE;
446 jfunc->value.known_type.offset = offset;
447 jfunc->value.known_type.base_type = base_type;
448 jfunc->value.known_type.component_type = component_type;
449 gcc_assert (component_type);
450 }
451
452 /* Set JFUNC to be a copy of another jump function (to be used by the jump
453 function combination code).  The two functions will share their rdesc. */
454
455 static void
456 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
457 struct ipa_jump_func *src)
458
459 {
460 gcc_checking_assert (src->type == IPA_JF_CONST);
461 dst->type = IPA_JF_CONST;
462 dst->value.constant = src->value.constant;
463 }
464
465 /* Set JFUNC to be a constant jump function. */
466
467 static void
468 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
469 struct cgraph_edge *cs)
470 {
471 constant = unshare_expr (constant);
472 if (constant && EXPR_P (constant))
473 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
474 jfunc->type = IPA_JF_CONST;
475 jfunc->value.constant.value = unshare_expr_without_location (constant);
476
477 if (TREE_CODE (constant) == ADDR_EXPR
478 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
479 {
480 struct ipa_cst_ref_desc *rdesc;
481 if (!ipa_refdesc_pool)
482 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
483 sizeof (struct ipa_cst_ref_desc), 32);
484
485 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
486 rdesc->cs = cs;
487 rdesc->next_duplicate = NULL;
488 rdesc->refcount = 1;
489 jfunc->value.constant.rdesc = rdesc;
490 }
491 else
492 jfunc->value.constant.rdesc = NULL;
493 }
494
495 /* Set JFUNC to be a simple pass-through jump function. */
496 static void
497 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
498 bool agg_preserved, bool type_preserved)
499 {
500 jfunc->type = IPA_JF_PASS_THROUGH;
501 jfunc->value.pass_through.operand = NULL_TREE;
502 jfunc->value.pass_through.formal_id = formal_id;
503 jfunc->value.pass_through.operation = NOP_EXPR;
504 jfunc->value.pass_through.agg_preserved = agg_preserved;
505 jfunc->value.pass_through.type_preserved = type_preserved;
506 }
507
508 /* Set JFUNC to be an arithmetic pass through jump function. */
509
510 static void
511 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
512 tree operand, enum tree_code operation)
513 {
514 jfunc->type = IPA_JF_PASS_THROUGH;
515 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
516 jfunc->value.pass_through.formal_id = formal_id;
517 jfunc->value.pass_through.operation = operation;
518 jfunc->value.pass_through.agg_preserved = false;
519 jfunc->value.pass_through.type_preserved = false;
520 }
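
/* For instance, an arithmetic pass-through jump function describes
   situations such as the following hypothetical sketch, where the argument
   of bar is the formal a combined with operation PLUS_EXPR and constant
   operand 4:

     foo (int a)
     {
       bar (a + 4);
     }
*/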
521
522 /* Set JFUNC to be an ancestor jump function. */
523
524 static void
525 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
526 tree type, int formal_id, bool agg_preserved,
527 bool type_preserved)
528 {
529 if (!flag_devirtualize)
530 type_preserved = false;
531 gcc_assert (!type_preserved
532 || (TREE_CODE (type) == RECORD_TYPE
533 && TYPE_BINFO (type)
534 && BINFO_VTABLE (TYPE_BINFO (type))));
535 jfunc->type = IPA_JF_ANCESTOR;
536 jfunc->value.ancestor.formal_id = formal_id;
537 jfunc->value.ancestor.offset = offset;
538 jfunc->value.ancestor.type = type_preserved ? type : NULL;
539 jfunc->value.ancestor.agg_preserved = agg_preserved;
540 jfunc->value.ancestor.type_preserved = type_preserved;
541 }
542
543 /* Extract the actual BINFO being described by JFUNC, which must be a known
544 type jump function. */
545
546 tree
547 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
548 {
549 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
550 if (!base_binfo)
551 return NULL_TREE;
552 return get_binfo_at_offset (base_binfo,
553 jfunc->value.known_type.offset,
554 jfunc->value.known_type.component_type);
555 }
556
557 /* Get IPA BB information about the given BB.  FBI is the context of analysis
558 of this function body. */
559
560 static struct ipa_bb_info *
561 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
562 {
563 gcc_checking_assert (fbi);
564 return &fbi->bb_infos[bb->index];
565 }
566
567 /* Structure to be passed in between detect_type_change and
568 check_stmt_for_type_change. */
569
570 struct type_change_info
571 {
572 /* Offset into the object where there is the virtual method pointer we are
573 looking for. */
574 HOST_WIDE_INT offset;
575 /* The declaration or SSA_NAME pointer of the base that we are checking for
576 type change. */
577 tree object;
578 /* If we actually can tell the type that the object has changed to, it is
579 stored in this field. Otherwise it remains NULL_TREE. */
580 tree known_current_type;
581 /* Set to true if dynamic type change has been detected. */
582 bool type_maybe_changed;
583 /* Set to true if multiple types have been encountered. known_current_type
584 must be disregarded in that case. */
585 bool multiple_types_encountered;
586 };
587
588 /* Return true if STMT can modify a virtual method table pointer.
589
590 This function makes special assumptions about both constructors and
591 destructors which are all the functions that are allowed to alter the VMT
592 pointers. It assumes that destructors begin with assignment into all VMT
593 pointers and that constructors essentially look in the following way:
594
595 1) The very first thing they do is that they call constructors of ancestor
596 sub-objects that have them.
597
598 2) Then VMT pointers of this and all its ancestors are set to new values
599 corresponding to the type of the constructor.
600
601 3) Only afterwards, other stuff such as constructor of member sub-objects
602 and the code written by the user is run. Only this may include calling
603 virtual functions, directly or indirectly.
604
605 There is no way to call a constructor of an ancestor sub-object in any
606 other way.
607
608 This means that we do not have to care whether constructors get the correct
609 type information because they will always change it (in fact, if we define
610 the type to be given by the VMT pointer, it is undefined).
611
612 The most important fact to derive from the above is that if, for some
613 statement in section 3, we try to detect whether the dynamic type has
614 changed, we can safely ignore all calls as we examine the function body
615 backwards until we reach statements in section 2 because these calls cannot
616 be ancestor constructors or destructors (if the input is not bogus) and so
617 do not change the dynamic type (this holds true only for automatically
618 allocated objects but at the moment we devirtualize only these). We then
619 must detect that statements in section 2 change the dynamic type and can try
620 to derive the new type. That is enough and we can stop, we will never see
621 the calls into constructors of sub-objects in this code. Therefore we can
622 safely ignore all call statements that we traverse.
623 */
624
625 static bool
626 stmt_may_be_vtbl_ptr_store (gimple stmt)
627 {
628 if (is_gimple_call (stmt))
629 return false;
630 /* TODO: Skip clobbers, doing so triggers a problem in PR60306. */
631 else if (is_gimple_assign (stmt))
632 {
633 tree lhs = gimple_assign_lhs (stmt);
634
635 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
636 {
637 if (flag_strict_aliasing
638 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
639 return false;
640
641 if (TREE_CODE (lhs) == COMPONENT_REF
642 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
643 return false;
644 /* In the future we might want to use get_base_ref_and_offset to find
645 if there is a field corresponding to the offset and if so, proceed
646 almost like if it was a component ref. */
647 }
648 }
649 return true;
650 }
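
/* The kind of statement the predicate above must let through is a VMT
   pointer store as emitted in constructors, in gimple dumps roughly (the
   exact form varies by target, this is only an illustration):

     this_2(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];
*/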
651
652 /* If STMT can be proved to be an assignment to the virtual method table
653 pointer of the object described by TCI and the type associated with the new
654 table identified, return the type.  Otherwise return NULL_TREE. */
655
656 static tree
657 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
658 {
659 HOST_WIDE_INT offset, size, max_size;
660 tree lhs, rhs, base, binfo;
661
662 if (!gimple_assign_single_p (stmt))
663 return NULL_TREE;
664
665 lhs = gimple_assign_lhs (stmt);
666 rhs = gimple_assign_rhs1 (stmt);
667 if (TREE_CODE (lhs) != COMPONENT_REF
668 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
669 return NULL_TREE;
670
671 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
672 if (offset != tci->offset
673 || size != POINTER_SIZE
674 || max_size != POINTER_SIZE)
675 return NULL_TREE;
676 if (TREE_CODE (base) == MEM_REF)
677 {
678 if (TREE_CODE (tci->object) != MEM_REF
679 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
680 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
681 TREE_OPERAND (base, 1)))
682 return NULL_TREE;
683 }
684 else if (tci->object != base)
685 return NULL_TREE;
686
687 binfo = vtable_pointer_value_to_binfo (rhs);
688
689 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
690 base of outer type. In this case we would need to either
691 work on binfos or translate it back to outer type and offset.
692 KNOWN_TYPE jump functions are not ready for that, yet. */
693 if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
694 return NULL_TREE;
695
696 return BINFO_TYPE (binfo);
697 }
698
699 /* Callback of walk_aliased_vdefs and a helper function for
700 detect_type_change to check whether a particular statement may modify
701 the virtual table pointer, and if possible also determine the new type of
702 the (sub-)object. It stores its result into DATA, which points to a
703 type_change_info structure. */
704
705 static bool
706 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
707 {
708 gimple stmt = SSA_NAME_DEF_STMT (vdef);
709 struct type_change_info *tci = (struct type_change_info *) data;
710
711 if (stmt_may_be_vtbl_ptr_store (stmt))
712 {
713 tree type;
714 type = extr_type_from_vtbl_ptr_store (stmt, tci);
715 if (tci->type_maybe_changed
716 && type != tci->known_current_type)
717 tci->multiple_types_encountered = true;
718 tci->known_current_type = type;
719 tci->type_maybe_changed = true;
720 return true;
721 }
722 else
723 return false;
724 }
725
726
727
728 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
729 callsite CALL) by looking for assignments to its virtual table pointer. If
730 it has, return true and fill in the jump function JFUNC with relevant type
731 information or set it to unknown. ARG is the object itself (not a pointer
732 to it, unless dereferenced). BASE is the base of the memory access as
733 returned by get_ref_base_and_extent, as is the offset. */
734
735 static bool
736 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
737 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
738 {
739 struct type_change_info tci;
740 ao_ref ao;
741
742 gcc_checking_assert (DECL_P (arg)
743 || TREE_CODE (arg) == MEM_REF
744 || handled_component_p (arg));
745 /* Const calls cannot call virtual methods through VMT and so type changes do
746 not matter. */
747 if (!flag_devirtualize || !gimple_vuse (call)
748 /* Be sure expected_type is polymorphic. */
749 || !comp_type
750 || TREE_CODE (comp_type) != RECORD_TYPE
751 || !TYPE_BINFO (comp_type)
752 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
753 return true;
754
755 /* C++ methods are not allowed to change the THIS pointer unless they
756 are constructors or destructors. */
757 if (TREE_CODE (base) == MEM_REF
758 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
759 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
760 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
761 && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
762 && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
763 && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
764 && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
765 == DECL_ARGUMENTS (current_function_decl)))
766 return false;
767
768 ao_ref_init (&ao, arg);
769 ao.base = base;
770 ao.offset = offset;
771 ao.size = POINTER_SIZE;
772 ao.max_size = ao.size;
773
774 tci.offset = offset;
775 tci.object = get_base_address (arg);
776 tci.known_current_type = NULL_TREE;
777 tci.type_maybe_changed = false;
778 tci.multiple_types_encountered = false;
779
780 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
781 &tci, NULL);
782 if (!tci.type_maybe_changed)
783 return false;
784
785 if (!tci.known_current_type
786 || tci.multiple_types_encountered
787 || offset != 0)
788 jfunc->type = IPA_JF_UNKNOWN;
789 else
790 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
791
792 return true;
793 }
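
/* A C++ situation this detection guards against (hypothetical sketch):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     void g (A *p)
     {
       p->~A ();
       new (p) B;
       p->f ();
     }

   The placement new rewrites the VMT pointer of *p, so at the call p->f ()
   the dynamic type is B rather than A.  Walking the virtual definitions
   backwards from that call finds the VMT store and JFUNC is filled in
   accordingly.  */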
794
795 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
796 SSA name (its dereference will become the base and the offset is assumed to
797 be zero). */
798
799 static bool
800 detect_type_change_ssa (tree arg, tree comp_type,
801 gimple call, struct ipa_jump_func *jfunc)
802 {
803 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
804 if (!flag_devirtualize
805 || !POINTER_TYPE_P (TREE_TYPE (arg)))
806 return false;
807
808 arg = build2 (MEM_REF, ptr_type_node, arg,
809 build_int_cst (ptr_type_node, 0));
810
811 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
812 }
813
814 /* Callback of walk_aliased_vdefs.  Records in the boolean variable pointed
815 to by DATA that it has been invoked. */
816
817 static bool
818 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
819 void *data)
820 {
821 bool *b = (bool *) data;
822 *b = true;
823 return true;
824 }
825
826 /* Return true if we have already walked so many statements in AA that we
827 should really just start giving up. */
828
829 static bool
830 aa_overwalked (struct func_body_info *fbi)
831 {
832 gcc_checking_assert (fbi);
833 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
834 }
835
836 /* Find the nearest valid aa status for parameter specified by INDEX that
837 dominates BB. */
838
839 static struct param_aa_status *
840 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
841 int index)
842 {
843 while (true)
844 {
845 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
846 if (!bb)
847 return NULL;
848 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
849 if (!bi->param_aa_statuses.is_empty ()
850 && bi->param_aa_statuses[index].valid)
851 return &bi->param_aa_statuses[index];
852 }
853 }
854
855 /* Get AA status structure for the given BB and parameter with INDEX.  Allocate
856 structures and/or initialize the result with a dominating description as
857 necessary. */
858
859 static struct param_aa_status *
860 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
861 int index)
862 {
863 gcc_checking_assert (fbi);
864 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
865 if (bi->param_aa_statuses.is_empty ())
866 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
867 struct param_aa_status *paa = &bi->param_aa_statuses[index];
868 if (!paa->valid)
869 {
870 gcc_checking_assert (!paa->parm_modified
871 && !paa->ref_modified
872 && !paa->pt_modified);
873 struct param_aa_status *dom_paa;
874 dom_paa = find_dominating_aa_status (fbi, bb, index);
875 if (dom_paa)
876 *paa = *dom_paa;
877 else
878 paa->valid = true;
879 }
880
881 return paa;
882 }
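
/* Example of the dominance-based reuse (sketch): in a CFG fragment like

          BB2
         /   \
       BB3   BB4

   where BB2 dominates BB3 and BB4, a valid status computed for a parameter
   in BB2 is simply copied when BB3 or BB4 is first queried, so the alias
   oracle walk is not repeated in the dominated blocks.  */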
883
884 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
885 a value known not to be modified in this function before reaching the
886 statement STMT.  FBI holds information about the function gathered so far
887 which does not survive the summary building stage. */
888
889 static bool
890 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
891 gimple stmt, tree parm_load)
892 {
893 struct param_aa_status *paa;
894 bool modified = false;
895 ao_ref refd;
896
897 /* FIXME: FBI can be NULL if we are being called from outside
898 ipa_node_analysis or ipcp_transform_function, which currently happens
899 during inlining analysis. It would be great to extend fbi's lifetime and
900 always have it. Currently, we are just not afraid of too much walking in
901 that case. */
902 if (fbi)
903 {
904 if (aa_overwalked (fbi))
905 return false;
906 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
907 if (paa->parm_modified)
908 return false;
909 }
910 else
911 paa = NULL;
912
913 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
914 ao_ref_init (&refd, parm_load);
915 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
916 &modified, NULL);
917 if (fbi)
918 fbi->aa_walked += walked;
919 if (paa && modified)
920 paa->parm_modified = true;
921 return !modified;
922 }
923
924 /* If STMT is an assignment that loads a value from a parameter declaration,
925 return the index of the parameter in ipa_node_params, provided it has not
926 been modified.  Otherwise return -1. */
927
928 static int
929 load_from_unmodified_param (struct func_body_info *fbi,
930 vec<ipa_param_descriptor> descriptors,
931 gimple stmt)
932 {
933 int index;
934 tree op1;
935
936 if (!gimple_assign_single_p (stmt))
937 return -1;
938
939 op1 = gimple_assign_rhs1 (stmt);
940 if (TREE_CODE (op1) != PARM_DECL)
941 return -1;
942
943 index = ipa_get_param_decl_index_1 (descriptors, op1);
944 if (index < 0
945 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
946 return -1;
947
948 return index;
949 }
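
/* For example (hypothetical gimple sketch), given an addressable parameter
   p, the statement

     p.0_1 = p;

   is such a load and the index of p is returned, provided the walk of
   virtual definitions shows that nothing could have stored into p before
   the statement.  */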
950
951 /* Return true if memory reference REF (which must be a load through parameter
952 with INDEX) loads data that are known to be unmodified in this function
953 before reaching statement STMT. */
954
955 static bool
956 parm_ref_data_preserved_p (struct func_body_info *fbi,
957 int index, gimple stmt, tree ref)
958 {
959 struct param_aa_status *paa;
960 bool modified = false;
961 ao_ref refd;
962
963 /* FIXME: FBI can be NULL if we are being called from outside
964 ipa_node_analysis or ipcp_transform_function, which currently happens
965 during inlining analysis. It would be great to extend fbi's lifetime and
966 always have it. Currently, we are just not afraid of too much walking in
967 that case. */
968 if (fbi)
969 {
970 if (aa_overwalked (fbi))
971 return false;
972 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
973 if (paa->ref_modified)
974 return false;
975 }
976 else
977 paa = NULL;
978
979 gcc_checking_assert (gimple_vuse (stmt));
980 ao_ref_init (&refd, ref);
981 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
982 &modified, NULL);
983 if (fbi)
984 fbi->aa_walked += walked;
985 if (paa && modified)
986 paa->ref_modified = true;
987 return !modified;
988 }
989
990 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
991 is known to be unmodified in this function before reaching call statement
992 CALL into which it is passed. FBI describes the function body. */
993
994 static bool
995 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
996 gimple call, tree parm)
997 {
998 bool modified = false;
999 ao_ref refd;
1000
1001 /* It's unnecessary to calculate anything about memory contents for a const
1002 function because it is not going to use it.  But do not cache the result
1003 either.  Also, no such calculations for non-pointers. */
1004 if (!gimple_vuse (call)
1005 || !POINTER_TYPE_P (TREE_TYPE (parm))
1006 || aa_overwalked (fbi))
1007 return false;
1008
1009 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1010 index);
1011 if (paa->pt_modified)
1012 return false;
1013
1014 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1015 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1016 &modified, NULL);
1017 fbi->aa_walked += walked;
1018 if (modified)
1019 paa->pt_modified = true;
1020 return !modified;
1021 }
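
/* E.g. in the sketch below, nothing can modify *p between the entry of foo
   and the call, so the data passed through p may be considered preserved
   (agg_preserved) for the call to bar:

     void foo (struct S *p)
     {
       int t = p->x;
       bar (p, t);
     }
*/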
1022
1023 /* Return true if we can prove that OP is a memory reference loading unmodified
1024 data from an aggregate passed as a parameter and if the aggregate is passed
1025 by reference, that the alias type of the load corresponds to the type of the
1026 formal parameter (so that we can rely on this type for TBAA in callers).
1027 FBI and DESCRIPTORS describe parameters of the current function (but FBI
1028 can be NULL), STMT is the load statement.  If function returns true,
1029 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1030 within the aggregate and whether it is a load from a value passed by
1031 reference respectively. */
1032
1033 static bool
1034 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1035 vec<ipa_param_descriptor> descriptors,
1036 gimple stmt, tree op, int *index_p,
1037 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1038 bool *by_ref_p)
1039 {
1040 int index;
1041 HOST_WIDE_INT size, max_size;
1042 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1043
1044 if (max_size == -1 || max_size != size || *offset_p < 0)
1045 return false;
1046
1047 if (DECL_P (base))
1048 {
1049 int index = ipa_get_param_decl_index_1 (descriptors, base);
1050 if (index >= 0
1051 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1052 {
1053 *index_p = index;
1054 *by_ref_p = false;
1055 if (size_p)
1056 *size_p = size;
1057 return true;
1058 }
1059 return false;
1060 }
1061
1062 if (TREE_CODE (base) != MEM_REF
1063 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1064 || !integer_zerop (TREE_OPERAND (base, 1)))
1065 return false;
1066
1067 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1068 {
1069 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1070 index = ipa_get_param_decl_index_1 (descriptors, parm);
1071 }
1072 else
1073 {
1074 /* This branch catches situations where a pointer parameter is not a
1075 gimple register, for example:
1076
1077 void hip7(S*) (struct S * p)
1078 {
1079 void (*<T2e4>) (struct S *) D.1867;
1080 struct S * p.1;
1081
1082 <bb 2>:
1083 p.1_1 = p;
1084 D.1867_2 = p.1_1->f;
1085 D.1867_2 ();
1086 gdp = &p;
1087 */
1088
1089 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1090 index = load_from_unmodified_param (fbi, descriptors, def);
1091 }
1092
1093 if (index >= 0
1094 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1095 {
1096 *index_p = index;
1097 *by_ref_p = true;
1098 if (size_p)
1099 *size_p = size;
1100 return true;
1101 }
1102 return false;
1103 }
1104
1105 /* Just like the previous function, just without the func_body_info pointer,
1106 for users outside of this file. */
1107
1108 bool
1109 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1110 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1111 bool *by_ref_p)
1112 {
1113 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1114 offset_p, NULL, by_ref_p);
1115 }
1116
1117 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1118 of an assignment statement STMT, try to determine whether we are actually
1119 handling any of the following cases and construct an appropriate jump
1120 function into JFUNC if so:
1121
1122 1) The passed value is loaded from a formal parameter which is not a gimple
1123 register (most probably because it is addressable, the value has to be
1124 scalar) and we can guarantee the value has not changed. This case can
1125 therefore be described by a simple pass-through jump function. For example:
1126
1127 foo (int a)
1128 {
1129 int a.0;
1130
1131 a.0_2 = a;
1132 bar (a.0_2);
1133
1134 2) The passed value can be described by a simple arithmetic pass-through
1135 jump function. E.g.
1136
1137 foo (int a)
1138 {
1139 int D.2064;
1140
1141 D.2064_4 = a.1(D) + 4;
1142 bar (D.2064_4);
1143
1144 This case can also occur in combination of the previous one, e.g.:
1145
1146 foo (int a, int z)
1147 {
1148 int a.0;
1149 int D.2064;
1150
1151 a.0_3 = a;
1152 D.2064_4 = a.0_3 + 4;
1153 foo (D.2064_4);
1154
1155 3) The passed value is an address of an object within another one (which
1156 also passed by reference). Such situations are described by an ancestor
1157 jump function and describe situations such as:
1158
1159 B::foo() (struct B * const this)
1160 {
1161 struct A * D.1845;
1162
1163 D.1845_2 = &this_1(D)->D.1748;
1164 A::bar (D.1845_2);
1165
1166 INFO is the structure describing individual parameters, accessible in
1167 different stages of IPA optimizations.  FBI contains the information that
1168 is only needed for intraprocedural analysis. */
1169
1170 static void
1171 compute_complex_assign_jump_func (struct func_body_info *fbi,
1172 struct ipa_node_params *info,
1173 struct ipa_jump_func *jfunc,
1174 gimple call, gimple stmt, tree name,
1175 tree param_type)
1176 {
1177 HOST_WIDE_INT offset, size, max_size;
1178 tree op1, tc_ssa, base, ssa;
1179 int index;
1180
1181 op1 = gimple_assign_rhs1 (stmt);
1182
1183 if (TREE_CODE (op1) == SSA_NAME)
1184 {
1185 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1186 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1187 else
1188 index = load_from_unmodified_param (fbi, info->descriptors,
1189 SSA_NAME_DEF_STMT (op1));
1190 tc_ssa = op1;
1191 }
1192 else
1193 {
1194 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1195 tc_ssa = gimple_assign_lhs (stmt);
1196 }
1197
1198 if (index >= 0)
1199 {
1200 tree op2 = gimple_assign_rhs2 (stmt);
1201
1202 if (op2)
1203 {
1204 if (!is_gimple_ip_invariant (op2)
1205 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1206 && !useless_type_conversion_p (TREE_TYPE (name),
1207 TREE_TYPE (op1))))
1208 return;
1209
1210 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1211 gimple_assign_rhs_code (stmt));
1212 }
1213 else if (gimple_assign_single_p (stmt))
1214 {
1215 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1216 bool type_p = false;
1217
1218 if (param_type && POINTER_TYPE_P (param_type))
1219 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1220 call, jfunc);
1221 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1222 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1223 }
1224 return;
1225 }
1226
1227 if (TREE_CODE (op1) != ADDR_EXPR)
1228 return;
1229 op1 = TREE_OPERAND (op1, 0);
1230 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1231 return;
1232 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1233 if (TREE_CODE (base) != MEM_REF
1234 /* If this is a varying address, punt. */
1235 || max_size == -1
1236 || max_size != size)
1237 return;
1238 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1239 ssa = TREE_OPERAND (base, 0);
1240 if (TREE_CODE (ssa) != SSA_NAME
1241 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1242 || offset < 0)
1243 return;
1244
1245 /* Dynamic types are changed in constructors and destructors. */
1246 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1247 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1248 {
1249 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1250 call, jfunc, offset);
1251 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1252 ipa_set_ancestor_jf (jfunc, offset,
1253 type_p ? TREE_TYPE (param_type) : NULL, index,
1254 parm_ref_data_pass_through_p (fbi, index,
1255 call, ssa), type_p);
1256 }
1257 }
1258
1259 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1260 it looks like:
1261
1262 iftmp.1_3 = &obj_2(D)->D.1762;
1263
1264 The base of the MEM_REF must be a default definition SSA NAME of a
1265 parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
1266 whole MEM_REF expression is returned and the offset calculated from any
1267 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1268 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1269
1270 static tree
1271 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1272 {
1273 HOST_WIDE_INT size, max_size;
1274 tree expr, parm, obj;
1275
1276 if (!gimple_assign_single_p (assign))
1277 return NULL_TREE;
1278 expr = gimple_assign_rhs1 (assign);
1279
1280 if (TREE_CODE (expr) != ADDR_EXPR)
1281 return NULL_TREE;
1282 expr = TREE_OPERAND (expr, 0);
1283 obj = expr;
1284 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1285
1286 if (TREE_CODE (expr) != MEM_REF
1287 /* If this is a varying address, punt. */
1288 || max_size == -1
1289 || max_size != size
1290 || *offset < 0)
1291 return NULL_TREE;
1292 parm = TREE_OPERAND (expr, 0);
1293 if (TREE_CODE (parm) != SSA_NAME
1294 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1295 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1296 return NULL_TREE;
1297
1298 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1299 *obj_p = obj;
1300 return expr;
1301 }
1302
1303
1304 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1305 statement PHI, try to find out whether NAME is in fact a
1306 multiple-inheritance typecast from a descendant into an ancestor of a formal
1307 parameter and thus can be described by an ancestor jump function and if so,
1308 write the appropriate function into JFUNC.
1309
1310 Essentially we want to match the following pattern:
1311
1312 if (obj_2(D) != 0B)
1313 goto <bb 3>;
1314 else
1315 goto <bb 4>;
1316
1317 <bb 3>:
1318 iftmp.1_3 = &obj_2(D)->D.1762;
1319
1320 <bb 4>:
1321 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1322 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1323 return D.1879_6; */
1324
1325 static void
1326 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1327 struct ipa_node_params *info,
1328 struct ipa_jump_func *jfunc,
1329 gimple call, gimple phi, tree param_type)
1330 {
1331 HOST_WIDE_INT offset;
1332 gimple assign, cond;
1333 basic_block phi_bb, assign_bb, cond_bb;
1334 tree tmp, parm, expr, obj;
1335 int index, i;
1336
1337 if (gimple_phi_num_args (phi) != 2)
1338 return;
1339
1340 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1341 tmp = PHI_ARG_DEF (phi, 0);
1342 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1343 tmp = PHI_ARG_DEF (phi, 1);
1344 else
1345 return;
1346 if (TREE_CODE (tmp) != SSA_NAME
1347 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1348 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1349 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1350 return;
1351
1352 assign = SSA_NAME_DEF_STMT (tmp);
1353 assign_bb = gimple_bb (assign);
1354 if (!single_pred_p (assign_bb))
1355 return;
1356 expr = get_ancestor_addr_info (assign, &obj, &offset);
1357 if (!expr)
1358 return;
1359 parm = TREE_OPERAND (expr, 0);
1360 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1361 if (index < 0)
1362 return;
1363
1364 cond_bb = single_pred (assign_bb);
1365 cond = last_stmt (cond_bb);
1366 if (!cond
1367 || gimple_code (cond) != GIMPLE_COND
1368 || gimple_cond_code (cond) != NE_EXPR
1369 || gimple_cond_lhs (cond) != parm
1370 || !integer_zerop (gimple_cond_rhs (cond)))
1371 return;
1372
1373 phi_bb = gimple_bb (phi);
1374 for (i = 0; i < 2; i++)
1375 {
1376 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1377 if (pred != assign_bb && pred != cond_bb)
1378 return;
1379 }
1380
1381 bool type_p = false;
1382 if (param_type && POINTER_TYPE_P (param_type))
1383 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1384 call, jfunc, offset);
1385 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1386 ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
1387 index,
1388 parm_ref_data_pass_through_p (fbi, index, call, parm),
1389 type_p);
1390 }
1391
1392 /* Given OP which is passed as an actual argument to a called function,
1393 determine if it is possible to construct a KNOWN_TYPE jump function for it
1394 and if so, create one and store it to JFUNC.
1395 EXPECTED_TYPE represents a type the argument should be in. */
1396
1397 static void
1398 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1399 gimple call, tree expected_type)
1400 {
1401 HOST_WIDE_INT offset, size, max_size;
1402 tree base;
1403
1404 if (!flag_devirtualize
1405 || TREE_CODE (op) != ADDR_EXPR
1406 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1407 /* Be sure expected_type is polymorphic. */
1408 || !expected_type
1409 || TREE_CODE (expected_type) != RECORD_TYPE
1410 || !TYPE_BINFO (expected_type)
1411 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
1412 return;
1413
1414 op = TREE_OPERAND (op, 0);
1415 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1416 if (!DECL_P (base)
1417 || max_size == -1
1418 || max_size != size
1419 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1420 || is_global_var (base))
1421 return;
1422
1423 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1424 return;
1425
1426 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1427 expected_type);
1428 }
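
/* For instance (hypothetical sketch), for the call below a KNOWN_TYPE jump
   function with base and component type B at offset zero can be built for
   the argument, as long as detect_type_change does not find that the
   dynamic type of b may have changed before the call:

     B b;
     foo (&b);
*/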
1429
1430 /* Inspect the given TYPE and return true iff it has the same structure (the
1431 same number of fields of the same types) as a C++ member pointer. If
1432 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1433 corresponding fields there. */
1434
1435 static bool
1436 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1437 {
1438 tree fld;
1439
1440 if (TREE_CODE (type) != RECORD_TYPE)
1441 return false;
1442
1443 fld = TYPE_FIELDS (type);
1444 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1445 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1446 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1447 return false;
1448
1449 if (method_ptr)
1450 *method_ptr = fld;
1451
1452 fld = DECL_CHAIN (fld);
1453 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1454 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1455 return false;
1456 if (delta)
1457 *delta = fld;
1458
1459 if (DECL_CHAIN (fld))
1460 return false;
1461
1462 return true;
1463 }
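
/* A C++ pointer to member function is typically lowered to a record holding
   a method pointer followed by a this-adjustment, i.e. shaped like the
   sketch

     struct
     {
       void (*__pfn) ();
       long __delta;
     };

   which is exactly the shape the predicate above recognizes.  */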
1464
1465 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1466 return the rhs of its defining statement. Otherwise return RHS as it
1467 is. */
1468
1469 static inline tree
1470 get_ssa_def_if_simple_copy (tree rhs)
1471 {
1472 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1473 {
1474 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1475
1476 if (gimple_assign_single_p (def_stmt))
1477 rhs = gimple_assign_rhs1 (def_stmt);
1478 else
1479 break;
1480 }
1481 return rhs;
1482 }
1483
1484 /* Simple linked list, describing known contents of an aggregate before a
1485 call. */
1486
1487 struct ipa_known_agg_contents_list
1488 {
1489 /* Offset and size of the described part of the aggregate. */
1490 HOST_WIDE_INT offset, size;
1491 /* Known constant value or NULL if the contents are known to be unknown. */
1492 tree constant;
1493 /* Pointer to the next structure in the list. */
1494 struct ipa_known_agg_contents_list *next;
1495 };
1496
1497 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1498 in ARG is filled in with constant values. ARG can either be an aggregate
1499 expression or a pointer to an aggregate. ARG_TYPE is the type of the aggregate.
1500 JFUNC is the jump function into which the constants are subsequently stored. */
1501
1502 static void
1503 determine_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1504 struct ipa_jump_func *jfunc)
1505 {
1506 struct ipa_known_agg_contents_list *list = NULL;
1507 int item_count = 0, const_count = 0;
1508 HOST_WIDE_INT arg_offset, arg_size;
1509 gimple_stmt_iterator gsi;
1510 tree arg_base;
1511 bool check_ref, by_ref;
1512 ao_ref r;
1513
1514 /* The function operates in three stages. First, we prepare check_ref, r,
1515 arg_base and arg_offset based on what is actually passed as an actual
1516 argument. */
1517
1518 if (POINTER_TYPE_P (arg_type))
1519 {
1520 by_ref = true;
1521 if (TREE_CODE (arg) == SSA_NAME)
1522 {
1523 tree type_size;
1524 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1525 return;
1526 check_ref = true;
1527 arg_base = arg;
1528 arg_offset = 0;
1529 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1530 arg_size = tree_to_uhwi (type_size);
1531 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1532 }
1533 else if (TREE_CODE (arg) == ADDR_EXPR)
1534 {
1535 HOST_WIDE_INT arg_max_size;
1536
1537 arg = TREE_OPERAND (arg, 0);
1538 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1539 &arg_max_size);
1540 if (arg_max_size == -1
1541 || arg_max_size != arg_size
1542 || arg_offset < 0)
1543 return;
1544 if (DECL_P (arg_base))
1545 {
1546 tree size;
1547 check_ref = false;
1548 size = build_int_cst (integer_type_node, arg_size);
1549 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1550 }
1551 else
1552 return;
1553 }
1554 else
1555 return;
1556 }
1557 else
1558 {
1559 HOST_WIDE_INT arg_max_size;
1560
1561 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1562
1563 by_ref = false;
1564 check_ref = false;
1565 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1566 &arg_max_size);
1567 if (arg_max_size == -1
1568 || arg_max_size != arg_size
1569 || arg_offset < 0)
1570 return;
1571
1572 ao_ref_init (&r, arg);
1573 }
1574
1575 /* Second stage walks back the BB, looks at individual statements and as long
1576 as it is confident of how the statements affect contents of the
1577 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1578 describing it. */
1579 gsi = gsi_for_stmt (call);
1580 gsi_prev (&gsi);
1581 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1582 {
1583 struct ipa_known_agg_contents_list *n, **p;
1584 gimple stmt = gsi_stmt (gsi);
1585 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1586 tree lhs, rhs, lhs_base;
1587 bool partial_overlap;
1588
1589 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1590 continue;
1591 if (!gimple_assign_single_p (stmt))
1592 break;
1593
1594 lhs = gimple_assign_lhs (stmt);
1595 rhs = gimple_assign_rhs1 (stmt);
1596 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1597 || TREE_CODE (lhs) == BIT_FIELD_REF
1598 || contains_bitfld_component_ref_p (lhs))
1599 break;
1600
1601 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1602 &lhs_max_size);
1603 if (lhs_max_size == -1
1604 || lhs_max_size != lhs_size
1605 || (lhs_offset < arg_offset
1606 && lhs_offset + lhs_size > arg_offset)
1607 || (lhs_offset < arg_offset + arg_size
1608 && lhs_offset + lhs_size > arg_offset + arg_size))
1609 break;
1610
1611 if (check_ref)
1612 {
1613 if (TREE_CODE (lhs_base) != MEM_REF
1614 || TREE_OPERAND (lhs_base, 0) != arg_base
1615 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1616 break;
1617 }
1618 else if (lhs_base != arg_base)
1619 {
1620 if (DECL_P (lhs_base))
1621 continue;
1622 else
1623 break;
1624 }
1625
1626 if (lhs_offset + lhs_size < arg_offset
1627 || lhs_offset >= (arg_offset + arg_size))
1628 continue;
1629
1630 partial_overlap = false;
1631 p = &list;
1632 while (*p && (*p)->offset < lhs_offset)
1633 {
1634 if ((*p)->offset + (*p)->size > lhs_offset)
1635 {
1636 partial_overlap = true;
1637 break;
1638 }
1639 p = &(*p)->next;
1640 }
1641 if (partial_overlap)
1642 break;
1643 if (*p && (*p)->offset < lhs_offset + lhs_size)
1644 {
1645 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1646 /* We already know this value is subsequently overwritten with
1647 something else. */
1648 continue;
1649 else
1650 /* Otherwise this is a partial overlap which we cannot
1651 represent. */
1652 break;
1653 }
1654
1655 rhs = get_ssa_def_if_simple_copy (rhs);
1656 n = XALLOCA (struct ipa_known_agg_contents_list);
1657 n->size = lhs_size;
1658 n->offset = lhs_offset;
1659 if (is_gimple_ip_invariant (rhs))
1660 {
1661 n->constant = rhs;
1662 const_count++;
1663 }
1664 else
1665 n->constant = NULL_TREE;
1666 n->next = *p;
1667 *p = n;
1668
1669 item_count++;
1670 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1671 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1672 break;
1673 }
1674
1675 /* Third stage just goes over the list and creates an appropriate vector of
1676 ipa_agg_jf_item structures out of it, of course only if there are
1677 any known constants to begin with. */
1678
1679 if (const_count)
1680 {
1681 jfunc->agg.by_ref = by_ref;
1682 vec_alloc (jfunc->agg.items, const_count);
1683 while (list)
1684 {
1685 if (list->constant)
1686 {
1687 struct ipa_agg_jf_item item;
1688 item.offset = list->offset - arg_offset;
1689 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1690 item.value = unshare_expr_without_location (list->constant);
1691 jfunc->agg.items->quick_push (item);
1692 }
1693 list = list->next;
1694 }
1695 }
1696 }
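
/* For example (hypothetical sketch, assuming foo takes a struct S *), for
   a call like

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the aggregate jump function of the argument receives one item per field,
   holding the constants 1 and 2 at the corresponding bit offsets, with
   by_ref set because a pointer to s is what is actually passed.  */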
1697
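/* Return the type of the formal parameter number I of the function
   corresponding to call graph edge E, or NULL when it cannot be determined
   (e.g. variadic arguments beyond the declared list).  */
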
1698 static tree
1699 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1700 {
1701 int n;
1702 tree type = (e->callee
1703 ? TREE_TYPE (e->callee->decl)
1704 : gimple_call_fntype (e->call_stmt));
1705 tree t = TYPE_ARG_TYPES (type);
1706
1707 for (n = 0; n < i; n++)
1708 {
1709 if (!t)
1710 break;
1711 t = TREE_CHAIN (t);
1712 }
1713 if (t)
1714 return TREE_VALUE (t);
1715 if (!e->callee)
1716 return NULL;
1717 t = DECL_ARGUMENTS (e->callee->decl);
1718 for (n = 0; n < i; n++)
1719 {
1720 if (!t)
1721 return NULL;
1722 t = TREE_CHAIN (t);
1723 }
1724 if (t)
1725 return TREE_TYPE (t);
1726 return NULL;
1727 }
1728
1729 /* Compute jump function for all arguments of callsite CS and insert the
1730 information in the jump_functions array in the ipa_edge_args corresponding
1731 to this callsite. */
1732
1733 static void
1734 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1735 struct cgraph_edge *cs)
1736 {
1737 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1738 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1739 gimple call = cs->call_stmt;
1740 int n, arg_num = gimple_call_num_args (call);
1741
1742 if (arg_num == 0 || args->jump_functions)
1743 return;
1744 vec_safe_grow_cleared (args->jump_functions, arg_num);
1745
1746 if (gimple_call_internal_p (call))
1747 return;
1748 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1749 return;
1750
1751 for (n = 0; n < arg_num; n++)
1752 {
1753 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1754 tree arg = gimple_call_arg (call, n);
1755 tree param_type = ipa_get_callee_param_type (cs, n);
1756
1757 if (is_gimple_ip_invariant (arg))
1758 ipa_set_jf_constant (jfunc, arg, cs);
1759 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1760 && TREE_CODE (arg) == PARM_DECL)
1761 {
1762 int index = ipa_get_param_decl_index (info, arg);
1763
1764 gcc_assert (index >= 0);
1765 /* Aggregate passed by value, check for pass-through, otherwise we
1766 will attempt to fill in aggregate contents later in this
1767 loop. */
1768 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1769 {
1770 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1771 continue;
1772 }
1773 }
1774 else if (TREE_CODE (arg) == SSA_NAME)
1775 {
1776 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1777 {
1778 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1779 if (index >= 0)
1780 {
1781 bool agg_p, type_p;
1782 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1783 if (param_type && POINTER_TYPE_P (param_type))
1784 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1785 call, jfunc);
1786 else
1787 type_p = false;
1788 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1789 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1790 type_p);
1791 }
1792 }
1793 else
1794 {
1795 gimple stmt = SSA_NAME_DEF_STMT (arg);
1796 if (is_gimple_assign (stmt))
1797 compute_complex_assign_jump_func (fbi, info, jfunc,
1798 call, stmt, arg, param_type);
1799 else if (gimple_code (stmt) == GIMPLE_PHI)
1800 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1801 call, stmt, param_type);
1802 }
1803 }
1804 else
1805 compute_known_type_jump_func (arg, jfunc, call,
1806 param_type
1807 && POINTER_TYPE_P (param_type)
1808 ? TREE_TYPE (param_type)
1809 : NULL);
1810
/* If ARG is a pointer, we cannot use its type to determine the type of the
   aggregate passed (because type conversions are ignored in gimple).  Usually
   we can safely get the type from the function declaration, but in the case
   of K&R prototypes or variadic functions we can try our luck with the type
   of the pointer passed.
   TODO: Since we look for actual initialization of the memory object, we may
   do better to work out the type based on the memory stores we find.  */
1817 if (!param_type)
1818 param_type = TREE_TYPE (arg);
1819
1820 if ((jfunc->type != IPA_JF_PASS_THROUGH
1821 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1822 && (jfunc->type != IPA_JF_ANCESTOR
1823 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1824 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1825 || POINTER_TYPE_P (param_type)))
1826 determine_known_aggregate_parts (call, arg, param_type, jfunc);
1827 }
1828 }
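
/* As an illustration (hypothetical call): for a call

     bar (7, i, &s);

   where i is the SSA default definition of a parameter of the caller and s
   is a local aggregate, the loop above would typically produce a constant
   jump function for the first argument, a simple pass-through for the
   second, and for the third a known-type jump function plus an attempt to
   describe the known contents of s via determine_known_aggregate_parts.  */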
1829
1830 /* Compute jump functions for all edges - both direct and indirect - outgoing
1831 from BB. */
1832
1833 static void
1834 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1835 {
1836 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1837 int i;
1838 struct cgraph_edge *cs;
1839
1840 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1841 {
1842 struct cgraph_node *callee = cs->callee;
1843
1844 if (callee)
1845 {
1846 cgraph_function_or_thunk_node (callee, NULL);
1847 /* We do not need to bother analyzing calls to unknown functions
1848 unless they may become known during lto/whopr. */
1849 if (!callee->definition && !flag_lto)
1850 continue;
1851 }
1852 ipa_compute_jump_functions_for_edge (fbi, cs);
1853 }
1854 }
1855
1856 /* If STMT looks like a statement loading a value from a member pointer formal
1857 parameter, return that parameter and store the offset of the field to
1858 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1859 might be clobbered). If USE_DELTA, then we look for a use of the delta
1860 field rather than the pfn. */
1861
1862 static tree
1863 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1864 HOST_WIDE_INT *offset_p)
1865 {
1866 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1867
1868 if (!gimple_assign_single_p (stmt))
1869 return NULL_TREE;
1870
1871 rhs = gimple_assign_rhs1 (stmt);
1872 if (TREE_CODE (rhs) == COMPONENT_REF)
1873 {
1874 ref_field = TREE_OPERAND (rhs, 1);
1875 rhs = TREE_OPERAND (rhs, 0);
1876 }
1877 else
1878 ref_field = NULL_TREE;
1879 if (TREE_CODE (rhs) != MEM_REF)
1880 return NULL_TREE;
1881 rec = TREE_OPERAND (rhs, 0);
1882 if (TREE_CODE (rec) != ADDR_EXPR)
1883 return NULL_TREE;
1884 rec = TREE_OPERAND (rec, 0);
1885 if (TREE_CODE (rec) != PARM_DECL
1886 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1887 return NULL_TREE;
1888 ref_offset = TREE_OPERAND (rhs, 1);
1889
1890 if (use_delta)
1891 fld = delta_field;
1892 else
1893 fld = ptr_field;
1894 if (offset_p)
1895 *offset_p = int_bit_position (fld);
1896
1897 if (ref_field)
1898 {
1899 if (integer_nonzerop (ref_offset))
1900 return NULL_TREE;
1901 return ref_field == fld ? rec : NULL_TREE;
1902 }
1903 else
1904 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1905 : NULL_TREE;
1906 }
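
/* For illustration, the two load shapes matched above correspond to

     f$__delta_5 = f.__delta;                 (COMPONENT_REF on top of a
                                               zero-offset MEM_REF)
     f$__pfn_24 = MEM[(struct *)&f + 4B];     (plain MEM_REF whose offset
                                               must equal the byte position
                                               of the requested field)

   where f must be a PARM_DECL with a member-pointer-like type.  */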
1907
1908 /* Returns true iff T is an SSA_NAME defined by a statement. */
1909
static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  return (TREE_CODE (t) == SSA_NAME
          && !SSA_NAME_IS_DEFAULT_DEF (t));
}
1919
1920 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1921 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1922 indirect call graph edge. */
1923
1924 static struct cgraph_edge *
1925 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1926 {
1927 struct cgraph_edge *cs;
1928
1929 cs = cgraph_edge (node, stmt);
1930 cs->indirect_info->param_index = param_index;
1931 cs->indirect_info->agg_contents = 0;
1932 cs->indirect_info->member_ptr = 0;
1933 return cs;
1934 }
1935
/* Analyze the CALL and examine uses of formal parameters of the caller
   FBI->node (described by FBI->info).  Currently it checks
1939 whether the call calls a pointer that is a formal parameter and if so, the
1940 parameter is marked with the called flag and an indirect call graph edge
1941 describing the call is created. This is very simple for ordinary pointers
1942 represented in SSA but not-so-nice when it comes to member pointers. The
1943 ugly part of this function does nothing more than trying to match the
1944 pattern of such a call. An example of such a pattern is the gimple dump
1945 below, the call is on the last line:
1946
1947 <bb 2>:
1948 f$__delta_5 = f.__delta;
1949 f$__pfn_24 = f.__pfn;
1950
1951 or
1952 <bb 2>:
1953 f$__delta_5 = MEM[(struct *)&f];
1954 f$__pfn_24 = MEM[(struct *)&f + 4B];
1955
1956 and a few lines below:
1957
1958 <bb 5>
1959 D.2496_3 = (int) f$__pfn_24;
1960 D.2497_4 = D.2496_3 & 1;
1961 if (D.2497_4 != 0)
1962 goto <bb 3>;
1963 else
1964 goto <bb 4>;
1965
1966 <bb 6>:
1967 D.2500_7 = (unsigned int) f$__delta_5;
1968 D.2501_8 = &S + D.2500_7;
1969 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1970 D.2503_10 = *D.2502_9;
1971 D.2504_12 = f$__pfn_24 + -1;
1972 D.2505_13 = (unsigned int) D.2504_12;
1973 D.2506_14 = D.2503_10 + D.2505_13;
1974 D.2507_15 = *D.2506_14;
1975 iftmp.11_16 = (String:: *) D.2507_15;
1976
1977 <bb 7>:
1978 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1979 D.2500_19 = (unsigned int) f$__delta_5;
1980 D.2508_20 = &S + D.2500_19;
1981 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1982
1983 Such patterns are results of simple calls to a member pointer:
1984
1985 int doprinting (int (MyString::* f)(int) const)
1986 {
1987 MyString S ("somestring");
1988
1989 return (S.*f)(4);
1990 }
1991
1992 Moreover, the function also looks for called pointers loaded from aggregates
1993 passed by value or reference. */
1994
1995 static void
1996 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
1997 tree target)
1998 {
1999 struct ipa_node_params *info = fbi->info;
2000 HOST_WIDE_INT offset;
2001 bool by_ref;
2002
2003 if (SSA_NAME_IS_DEFAULT_DEF (target))
2004 {
2005 tree var = SSA_NAME_VAR (target);
2006 int index = ipa_get_param_decl_index (info, var);
2007 if (index >= 0)
2008 ipa_note_param_call (fbi->node, index, call);
2009 return;
2010 }
2011
2012 int index;
2013 gimple def = SSA_NAME_DEF_STMT (target);
2014 if (gimple_assign_single_p (def)
2015 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2016 gimple_assign_rhs1 (def), &index, &offset,
2017 NULL, &by_ref))
2018 {
2019 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2020 if (cs->indirect_info->offset != offset)
2021 cs->indirect_info->outer_type = NULL;
2022 cs->indirect_info->offset = offset;
2023 cs->indirect_info->agg_contents = 1;
2024 cs->indirect_info->by_ref = by_ref;
2025 return;
2026 }
2027
2028 /* Now we need to try to match the complex pattern of calling a member
2029 pointer. */
2030 if (gimple_code (def) != GIMPLE_PHI
2031 || gimple_phi_num_args (def) != 2
2032 || !POINTER_TYPE_P (TREE_TYPE (target))
2033 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2034 return;
2035
2036 /* First, we need to check whether one of these is a load from a member
2037 pointer that is a parameter to this function. */
2038 tree n1 = PHI_ARG_DEF (def, 0);
2039 tree n2 = PHI_ARG_DEF (def, 1);
2040 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2041 return;
2042 gimple d1 = SSA_NAME_DEF_STMT (n1);
2043 gimple d2 = SSA_NAME_DEF_STMT (n2);
2044
2045 tree rec;
2046 basic_block bb, virt_bb;
2047 basic_block join = gimple_bb (def);
2048 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2049 {
2050 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2051 return;
2052
2053 bb = EDGE_PRED (join, 0)->src;
2054 virt_bb = gimple_bb (d2);
2055 }
2056 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2057 {
2058 bb = EDGE_PRED (join, 1)->src;
2059 virt_bb = gimple_bb (d1);
2060 }
2061 else
2062 return;
2063
2064 /* Second, we need to check that the basic blocks are laid out in the way
2065 corresponding to the pattern. */
2066
2067 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2068 || single_pred (virt_bb) != bb
2069 || single_succ (virt_bb) != join)
2070 return;
2071
/* Third, check that the branching depends on the least significant bit
   of the pfn.  */
2074
2075 gimple branch = last_stmt (bb);
2076 if (!branch || gimple_code (branch) != GIMPLE_COND)
2077 return;
2078
2079 if ((gimple_cond_code (branch) != NE_EXPR
2080 && gimple_cond_code (branch) != EQ_EXPR)
2081 || !integer_zerop (gimple_cond_rhs (branch)))
2082 return;
2083
2084 tree cond = gimple_cond_lhs (branch);
2085 if (!ipa_is_ssa_with_stmt_def (cond))
2086 return;
2087
2088 def = SSA_NAME_DEF_STMT (cond);
2089 if (!is_gimple_assign (def)
2090 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2091 || !integer_onep (gimple_assign_rhs2 (def)))
2092 return;
2093
2094 cond = gimple_assign_rhs1 (def);
2095 if (!ipa_is_ssa_with_stmt_def (cond))
2096 return;
2097
2098 def = SSA_NAME_DEF_STMT (cond);
2099
2100 if (is_gimple_assign (def)
2101 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2102 {
2103 cond = gimple_assign_rhs1 (def);
2104 if (!ipa_is_ssa_with_stmt_def (cond))
2105 return;
2106 def = SSA_NAME_DEF_STMT (cond);
2107 }
2108
2109 tree rec2;
2110 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2111 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2112 == ptrmemfunc_vbit_in_delta),
2113 NULL);
2114 if (rec != rec2)
2115 return;
2116
2117 index = ipa_get_param_decl_index (info, rec);
2118 if (index >= 0
2119 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2120 {
2121 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2122 if (cs->indirect_info->offset != offset)
2123 cs->indirect_info->outer_type = NULL;
2124 cs->indirect_info->offset = offset;
2125 cs->indirect_info->agg_contents = 1;
2126 cs->indirect_info->member_ptr = 1;
2127 }
2128
2129 return;
2130 }
2131
2132 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2133 object referenced in the expression is a formal parameter of the caller
2134 FBI->node (described by FBI->info), create a call note for the
2135 statement. */
2136
2137 static void
2138 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2139 gimple call, tree target)
2140 {
2141 tree obj = OBJ_TYPE_REF_OBJECT (target);
2142 int index;
2143 HOST_WIDE_INT anc_offset;
2144
2145 if (!flag_devirtualize)
2146 return;
2147
2148 if (TREE_CODE (obj) != SSA_NAME)
2149 return;
2150
2151 struct ipa_node_params *info = fbi->info;
2152 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2153 {
2154 struct ipa_jump_func jfunc;
2155 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2156 return;
2157
2158 anc_offset = 0;
2159 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2160 gcc_assert (index >= 0);
2161 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2162 call, &jfunc))
2163 return;
2164 }
2165 else
2166 {
2167 struct ipa_jump_func jfunc;
2168 gimple stmt = SSA_NAME_DEF_STMT (obj);
2169 tree expr;
2170
2171 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2172 if (!expr)
2173 return;
2174 index = ipa_get_param_decl_index (info,
2175 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2176 gcc_assert (index >= 0);
2177 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2178 call, &jfunc, anc_offset))
2179 return;
2180 }
2181
2182 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2183 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2184 ii->offset = anc_offset;
2185 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2186 ii->otr_type = obj_type_ref_class (target);
2187 ii->polymorphic = 1;
2188 }
2189
/* Analyze call statement CALL to determine whether and how it utilizes
   formal parameters of the caller FBI->node (described by FBI->info).  */
2193
2194 static void
2195 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2196 {
2197 tree target = gimple_call_fn (call);
2198
2199 if (!target
2200 || (TREE_CODE (target) != SSA_NAME
2201 && !virtual_method_call_p (target)))
2202 return;
2203
2204 /* If we previously turned the call into a direct call, there is
2205 no need to analyze. */
2206 struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
2207 if (cs && !cs->indirect_unknown_callee)
2208 return;
2209 if (TREE_CODE (target) == SSA_NAME)
2210 ipa_analyze_indirect_call_uses (fbi, call, target);
2211 else if (virtual_method_call_p (target))
2212 ipa_analyze_virtual_call_uses (fbi, call, target);
2213 }
2214
2215
/* Analyze the call statement STMT with respect to formal parameters
   (described in FBI->info) of the caller given by FBI->node.  Currently it
   only checks whether formal parameters are called.  */
2219
2220 static void
2221 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2222 {
2223 if (is_gimple_call (stmt))
2224 ipa_analyze_call_uses (fbi, stmt);
2225 }
2226
/* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
   address-taken operands alike.  If OP is a parameter declaration, mark it
   as used in the info structure passed in DATA.  */
2230
2231 static bool
2232 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2233 {
2234 struct ipa_node_params *info = (struct ipa_node_params *) data;
2235
2236 op = get_base_address (op);
2237 if (op
2238 && TREE_CODE (op) == PARM_DECL)
2239 {
2240 int index = ipa_get_param_decl_index (info, op);
2241 gcc_assert (index >= 0);
2242 ipa_set_param_used (info, index, true);
2243 }
2244
2245 return false;
2246 }
2247
2248 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2249 the findings in various structures of the associated ipa_node_params
2250 structure, such as parameter flags, notes etc. FBI holds various data about
2251 the function being analyzed. */
2252
2253 static void
2254 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2255 {
2256 gimple_stmt_iterator gsi;
2257 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2258 {
2259 gimple stmt = gsi_stmt (gsi);
2260
2261 if (is_gimple_debug (stmt))
2262 continue;
2263
2264 ipa_analyze_stmt_uses (fbi, stmt);
2265 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2266 visit_ref_for_mod_analysis,
2267 visit_ref_for_mod_analysis,
2268 visit_ref_for_mod_analysis);
2269 }
2270 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2271 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2272 visit_ref_for_mod_analysis,
2273 visit_ref_for_mod_analysis,
2274 visit_ref_for_mod_analysis);
2275 }
2276
2277 /* Calculate controlled uses of parameters of NODE. */
2278
2279 static void
2280 ipa_analyze_controlled_uses (struct cgraph_node *node)
2281 {
2282 struct ipa_node_params *info = IPA_NODE_REF (node);
2283
2284 for (int i = 0; i < ipa_get_param_count (info); i++)
2285 {
2286 tree parm = ipa_get_param (info, i);
2287 int controlled_uses = 0;
2288
2289 /* For SSA regs see if parameter is used. For non-SSA we compute
2290 the flag during modification analysis. */
2291 if (is_gimple_reg (parm))
2292 {
2293 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2294 parm);
2295 if (ddef && !has_zero_uses (ddef))
2296 {
2297 imm_use_iterator imm_iter;
2298 use_operand_p use_p;
2299
2300 ipa_set_param_used (info, i, true);
2301 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2302 if (!is_gimple_call (USE_STMT (use_p)))
2303 {
2304 if (!is_gimple_debug (USE_STMT (use_p)))
2305 {
2306 controlled_uses = IPA_UNDESCRIBED_USE;
2307 break;
2308 }
2309 }
2310 else
2311 controlled_uses++;
2312 }
2313 else
2314 controlled_uses = 0;
2315 }
2316 else
2317 controlled_uses = IPA_UNDESCRIBED_USE;
2318 ipa_set_controlled_uses (info, i, controlled_uses);
2319 }
2320 }
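
/* A minimal example (hypothetical function): in

     void f (void (*cb) (void)) { cb (); cb (); }

   both uses of the default definition of cb are call statements, so its
   controlled uses count would be 2; any non-call, non-debug use would
   instead force IPA_UNDESCRIBED_USE.  */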
2321
2322 /* Free stuff in BI. */
2323
2324 static void
2325 free_ipa_bb_info (struct ipa_bb_info *bi)
2326 {
2327 bi->cg_edges.release ();
2328 bi->param_aa_statuses.release ();
2329 }
2330
2331 /* Dominator walker driving the analysis. */
2332
2333 class analysis_dom_walker : public dom_walker
2334 {
2335 public:
2336 analysis_dom_walker (struct func_body_info *fbi)
2337 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2338
2339 virtual void before_dom_children (basic_block);
2340
2341 private:
2342 struct func_body_info *m_fbi;
2343 };
2344
2345 void
2346 analysis_dom_walker::before_dom_children (basic_block bb)
2347 {
2348 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2349 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2350 }
2351
/* Initialize the array describing properties of formal parameters
2353 of NODE, analyze their uses and compute jump functions associated
2354 with actual arguments of calls from within NODE. */
2355
2356 void
2357 ipa_analyze_node (struct cgraph_node *node)
2358 {
2359 struct func_body_info fbi;
2360 struct ipa_node_params *info;
2361
2362 ipa_check_create_node_params ();
2363 ipa_check_create_edge_args ();
2364 info = IPA_NODE_REF (node);
2365
2366 if (info->analysis_done)
2367 return;
2368 info->analysis_done = 1;
2369
2370 if (ipa_func_spec_opts_forbid_analysis_p (node))
2371 {
2372 for (int i = 0; i < ipa_get_param_count (info); i++)
2373 {
2374 ipa_set_param_used (info, i, true);
2375 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2376 }
2377 return;
2378 }
2379
2380 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2381 push_cfun (func);
2382 calculate_dominance_info (CDI_DOMINATORS);
2383 ipa_initialize_node_params (node);
2384 ipa_analyze_controlled_uses (node);
2385
2386 fbi.node = node;
2387 fbi.info = IPA_NODE_REF (node);
2388 fbi.bb_infos = vNULL;
2389 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2390 fbi.param_count = ipa_get_param_count (info);
2391 fbi.aa_walked = 0;
2392
2393 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2394 {
2395 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2396 bi->cg_edges.safe_push (cs);
2397 }
2398
2399 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2400 {
2401 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2402 bi->cg_edges.safe_push (cs);
2403 }
2404
2405 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2406
2407 int i;
2408 struct ipa_bb_info *bi;
2409 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2410 free_ipa_bb_info (bi);
2411 fbi.bb_infos.release ();
2412 free_dominance_info (CDI_DOMINATORS);
2413 pop_cfun ();
2414 }
2415
2416 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2417 attempt a type-based devirtualization. If successful, return the
2418 target function declaration, otherwise return NULL. */
2419
2420 tree
2421 ipa_intraprocedural_devirtualization (gimple call)
2422 {
2423 tree binfo, token, fndecl;
2424 struct ipa_jump_func jfunc;
2425 tree otr = gimple_call_fn (call);
2426
2427 jfunc.type = IPA_JF_UNKNOWN;
2428 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2429 call, obj_type_ref_class (otr));
2430 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2431 return NULL_TREE;
2432 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2433 if (!binfo)
2434 return NULL_TREE;
2435 token = OBJ_TYPE_REF_TOKEN (otr);
2436 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2437 binfo);
2438 #ifdef ENABLE_CHECKING
2439 if (fndecl)
2440 gcc_assert (possible_polymorphic_call_target_p
2441 (otr, cgraph_get_node (fndecl)));
2442 #endif
2443 return fndecl;
2444 }
2445
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and SRC of known
   type.  */
2449
2450 static void
2451 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2452 struct ipa_jump_func *dst)
2453 {
2454 HOST_WIDE_INT combined_offset;
2455 tree combined_type;
2456
2457 if (!ipa_get_jf_ancestor_type_preserved (dst))
2458 {
2459 dst->type = IPA_JF_UNKNOWN;
2460 return;
2461 }
2462
2463 combined_offset = ipa_get_jf_known_type_offset (src)
2464 + ipa_get_jf_ancestor_offset (dst);
2465 combined_type = ipa_get_jf_ancestor_type (dst);
2466
2467 ipa_set_jf_known_type (dst, combined_offset,
2468 ipa_get_jf_known_type_base_type (src),
2469 combined_type);
2470 }
2471
2472 /* Update the jump functions associated with call graph edge E when the call
2473 graph edge CS is being inlined, assuming that E->caller is already (possibly
2474 indirectly) inlined into CS->callee and that E has not been inlined. */
2475
2476 static void
2477 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2478 struct cgraph_edge *e)
2479 {
2480 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2481 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2482 int count = ipa_get_cs_argument_count (args);
2483 int i;
2484
2485 for (i = 0; i < count; i++)
2486 {
2487 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2488
2489 if (dst->type == IPA_JF_ANCESTOR)
2490 {
2491 struct ipa_jump_func *src;
2492 int dst_fid = dst->value.ancestor.formal_id;
2493
/* A variable number of arguments can cause havoc if we try to access
   an argument that does not exist in the inlined edge.  So make sure
   we don't.  */
2497 if (dst_fid >= ipa_get_cs_argument_count (top))
2498 {
2499 dst->type = IPA_JF_UNKNOWN;
2500 continue;
2501 }
2502
2503 src = ipa_get_ith_jump_func (top, dst_fid);
2504
2505 if (src->agg.items
2506 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2507 {
2508 struct ipa_agg_jf_item *item;
2509 int j;
2510
2511 /* Currently we do not produce clobber aggregate jump functions,
2512 replace with merging when we do. */
2513 gcc_assert (!dst->agg.items);
2514
2515 dst->agg.items = vec_safe_copy (src->agg.items);
2516 dst->agg.by_ref = src->agg.by_ref;
2517 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2518 item->offset -= dst->value.ancestor.offset;
2519 }
2520
2521 if (src->type == IPA_JF_KNOWN_TYPE)
2522 combine_known_type_and_ancestor_jfs (src, dst);
2523 else if (src->type == IPA_JF_PASS_THROUGH
2524 && src->value.pass_through.operation == NOP_EXPR)
2525 {
2526 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2527 dst->value.ancestor.agg_preserved &=
2528 src->value.pass_through.agg_preserved;
2529 dst->value.ancestor.type_preserved &=
2530 src->value.pass_through.type_preserved;
2531 }
2532 else if (src->type == IPA_JF_ANCESTOR)
2533 {
2534 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2535 dst->value.ancestor.offset += src->value.ancestor.offset;
2536 dst->value.ancestor.agg_preserved &=
2537 src->value.ancestor.agg_preserved;
2538 dst->value.ancestor.type_preserved &=
2539 src->value.ancestor.type_preserved;
2540 }
2541 else
2542 dst->type = IPA_JF_UNKNOWN;
2543 }
2544 else if (dst->type == IPA_JF_PASS_THROUGH)
2545 {
2546 struct ipa_jump_func *src;
/* We must check the range because of calls with a variable number of
   arguments, and we cannot combine jump functions with operations.  */
2549 if (dst->value.pass_through.operation == NOP_EXPR
2550 && (dst->value.pass_through.formal_id
2551 < ipa_get_cs_argument_count (top)))
2552 {
2553 int dst_fid = dst->value.pass_through.formal_id;
2554 src = ipa_get_ith_jump_func (top, dst_fid);
2555 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2556
2557 switch (src->type)
2558 {
2559 case IPA_JF_UNKNOWN:
2560 dst->type = IPA_JF_UNKNOWN;
2561 break;
2562 case IPA_JF_KNOWN_TYPE:
2563 if (ipa_get_jf_pass_through_type_preserved (dst))
2564 ipa_set_jf_known_type (dst,
2565 ipa_get_jf_known_type_offset (src),
2566 ipa_get_jf_known_type_base_type (src),
2567 ipa_get_jf_known_type_component_type (src));
2568 else
2569 dst->type = IPA_JF_UNKNOWN;
2570 break;
2571 case IPA_JF_CONST:
2572 ipa_set_jf_cst_copy (dst, src);
2573 break;
2574
2575 case IPA_JF_PASS_THROUGH:
2576 {
2577 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2578 enum tree_code operation;
2579 operation = ipa_get_jf_pass_through_operation (src);
2580
2581 if (operation == NOP_EXPR)
2582 {
2583 bool agg_p, type_p;
2584 agg_p = dst_agg_p
2585 && ipa_get_jf_pass_through_agg_preserved (src);
2586 type_p = ipa_get_jf_pass_through_type_preserved (src)
2587 && ipa_get_jf_pass_through_type_preserved (dst);
2588 ipa_set_jf_simple_pass_through (dst, formal_id,
2589 agg_p, type_p);
2590 }
2591 else
2592 {
2593 tree operand = ipa_get_jf_pass_through_operand (src);
2594 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2595 operation);
2596 }
2597 break;
2598 }
2599 case IPA_JF_ANCESTOR:
2600 {
2601 bool agg_p, type_p;
2602 agg_p = dst_agg_p
2603 && ipa_get_jf_ancestor_agg_preserved (src);
2604 type_p = ipa_get_jf_ancestor_type_preserved (src)
2605 && ipa_get_jf_pass_through_type_preserved (dst);
2606 ipa_set_ancestor_jf (dst,
2607 ipa_get_jf_ancestor_offset (src),
2608 ipa_get_jf_ancestor_type (src),
2609 ipa_get_jf_ancestor_formal_id (src),
2610 agg_p, type_p);
2611 break;
2612 }
2613 default:
2614 gcc_unreachable ();
2615 }
2616
2617 if (src->agg.items
2618 && (dst_agg_p || !src->agg.by_ref))
2619 {
2620 /* Currently we do not produce clobber aggregate jump
2621 functions, replace with merging when we do. */
2622 gcc_assert (!dst->agg.items);
2623
2624 dst->agg.by_ref = src->agg.by_ref;
2625 dst->agg.items = vec_safe_copy (src->agg.items);
2626 }
2627 }
2628 else
2629 dst->type = IPA_JF_UNKNOWN;
2630 }
2631 }
2632 }
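
/* For instance (hypothetical combination): if E passes its caller's
   parameter straight through (DST is a simple pass-through) and the inlined
   edge CS passed the constant 5 in that position (SRC is IPA_JF_CONST), the
   IPA_JF_CONST case above turns the jump function of E's argument into that
   constant via ipa_set_jf_cst_copy.  */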
2633
2634 /* If TARGET is an addr_expr of a function declaration, make it the destination
2635 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2636
2637 struct cgraph_edge *
2638 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2639 {
2640 struct cgraph_node *callee;
2641 struct inline_edge_summary *es = inline_edge_summary (ie);
2642 bool unreachable = false;
2643
2644 if (TREE_CODE (target) == ADDR_EXPR)
2645 target = TREE_OPERAND (target, 0);
2646 if (TREE_CODE (target) != FUNCTION_DECL)
2647 {
2648 target = canonicalize_constructor_val (target, NULL);
2649 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2650 {
2651 if (ie->indirect_info->member_ptr)
2652 /* Member pointer call that goes through a VMT lookup. */
2653 return NULL;
2654
2655 if (dump_enabled_p ())
2656 {
2657 location_t loc = gimple_location (ie->call_stmt);
2658 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2659 "discovered direct call to non-function in %s/%i, "
2660 "making it __builtin_unreachable\n",
2661 ie->caller->name (),
2662 ie->caller->order);
2663 }
2664 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2665 callee = cgraph_get_create_node (target);
2666 unreachable = true;
2667 }
2668 else
2669 callee = cgraph_get_node (target);
2670 }
2671 else
2672 callee = cgraph_get_node (target);
2673
/* Because may-edges are not explicitly represented and the vtable may be
   external, we may create the first reference to the object in the unit.  */
2676 if (!callee || callee->global.inlined_to)
2677 {
/* We had better ensure that we can refer to it.
   In the case of static functions we are out of luck, since we have already
   removed the body.  In the case of public functions we may or may
   not introduce the reference.  */
2683 if (!canonicalize_constructor_val (target, NULL)
2684 || !TREE_PUBLIC (target))
2685 {
2686 if (dump_file)
2687 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2688 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2689 xstrdup (ie->caller->name ()),
2690 ie->caller->order,
2691 xstrdup (ie->callee->name ()),
2692 ie->callee->order);
2693 return NULL;
2694 }
2695 callee = cgraph_get_create_node (target);
2696 }
2697
2698 if (!dbg_cnt (devirt))
2699 return NULL;
2700
2701 ipa_check_create_node_params ();
2702
/* We cannot make edges to inline clones.  It is a bug that someone removed
   the cgraph node too early.  */
2705 gcc_assert (!callee->global.inlined_to);
2706
2707 if (dump_file && !unreachable)
2708 {
2709 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2710 "(%s/%i -> %s/%i), for stmt ",
2711 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2712 xstrdup (ie->caller->name ()),
2713 ie->caller->order,
2714 xstrdup (callee->name ()),
2715 callee->order);
2716 if (ie->call_stmt)
2717 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2718 else
2719 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2720 }
2721 if (dump_enabled_p ())
2722 {
2723 location_t loc = gimple_location (ie->call_stmt);
2724 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2725 "converting indirect call in %s to direct call to %s\n",
2726 ie->caller->name (), callee->name ());
2727 }
2728 ie = cgraph_make_edge_direct (ie, callee);
2729 es = inline_edge_summary (ie);
2730 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2731 - eni_size_weights.call_cost);
2732 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2733 - eni_time_weights.call_cost);
2734
2735 return ie;
2736 }
2737
/* Retrieve a value from aggregate jump function AGG at the given OFFSET, or
   return NULL if there is none.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */
2741
2742 tree
2743 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2744 HOST_WIDE_INT offset, bool by_ref)
2745 {
2746 struct ipa_agg_jf_item *item;
2747 int i;
2748
2749 if (by_ref != agg->by_ref)
2750 return NULL;
2751
2752 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2753 if (item->offset == offset)
2754 {
2755 /* Currently we do not have clobber values, return NULL for them once
2756 we do. */
2757 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2758 return item->value;
2759 }
2760 return NULL;
2761 }
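
/* A usage sketch (hypothetical jump function): if AGG describes an aggregate
   passed by reference with a known constant at bit offset 64, then

     tree t = ipa_find_agg_cst_for_param (agg, 64, true);

   returns that constant, whereas passing false for BY_REF would return NULL
   because the pass style must match.  */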
2762
2763 /* Remove a reference to SYMBOL from the list of references of a node given by
2764 reference description RDESC. Return true if the reference has been
2765 successfully found and removed. */
2766
2767 static bool
2768 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2769 {
2770 struct ipa_ref *to_del;
2771 struct cgraph_edge *origin;
2772
2773 origin = rdesc->cs;
2774 if (!origin)
2775 return false;
2776 to_del = ipa_find_reference (origin->caller, symbol,
2777 origin->call_stmt, origin->lto_stmt_uid);
2778 if (!to_del)
2779 return false;
2780
2781 ipa_remove_reference (to_del);
2782 if (dump_file)
2783 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2784 xstrdup (origin->caller->name ()),
2785 origin->caller->order, xstrdup (symbol->name ()));
2786 return true;
2787 }
2788
2789 /* If JFUNC has a reference description with refcount different from
2790 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2791 NULL. JFUNC must be a constant jump function. */
2792
2793 static struct ipa_cst_ref_desc *
2794 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2795 {
2796 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2797 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2798 return rdesc;
2799 else
2800 return NULL;
2801 }
2802
2803 /* If the value of constant jump function JFUNC is an address of a function
2804 declaration, return the associated call graph node. Otherwise return
2805 NULL. */
2806
2807 static cgraph_node *
2808 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2809 {
2810 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2811 tree cst = ipa_get_jf_constant (jfunc);
2812 if (TREE_CODE (cst) != ADDR_EXPR
2813 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2814 return NULL;
2815
2816 return cgraph_get_node (TREE_OPERAND (cst, 0));
2817 }
2818
2819
2820 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2821 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2822 the edge specified in the rdesc. Return false if either the symbol or the
2823 reference could not be found, otherwise return true. */
2824
2825 static bool
2826 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2827 {
2828 struct ipa_cst_ref_desc *rdesc;
2829 if (jfunc->type == IPA_JF_CONST
2830 && (rdesc = jfunc_rdesc_usable (jfunc))
2831 && --rdesc->refcount == 0)
2832 {
2833 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2834 if (!symbol)
2835 return false;
2836
2837 return remove_described_reference (symbol, rdesc);
2838 }
2839 return true;
2840 }
2841
2842 /* Try to find a destination for indirect edge IE that corresponds to a simple
2843 call or a call of a member function pointer and where the destination is a
2844 pointer formal parameter described by jump function JFUNC. If it can be
2845 determined, return the newly direct edge, otherwise return NULL.
2846 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2847
2848 static struct cgraph_edge *
2849 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2850 struct ipa_jump_func *jfunc,
2851 struct ipa_node_params *new_root_info)
2852 {
2853 struct cgraph_edge *cs;
2854 tree target;
2855 bool agg_contents = ie->indirect_info->agg_contents;
2856
2857 if (ie->indirect_info->agg_contents)
2858 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2859 ie->indirect_info->offset,
2860 ie->indirect_info->by_ref);
2861 else
2862 target = ipa_value_from_jfunc (new_root_info, jfunc);
2863 if (!target)
2864 return NULL;
2865 cs = ipa_make_edge_direct_to_target (ie, target);
2866
2867 if (cs && !agg_contents)
2868 {
2869 bool ok;
2870 gcc_checking_assert (cs->callee
2871 && (cs != ie
2872 || jfunc->type != IPA_JF_CONST
2873 || !cgraph_node_for_jfunc (jfunc)
2874 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2875 ok = try_decrement_rdesc_refcount (jfunc);
2876 gcc_checking_assert (ok);
2877 }
2878
2879 return cs;
2880 }
2881
2882 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2883 call based on a formal parameter which is described by jump function JFUNC
2884 and if it can be determined, make it direct and return the direct edge.
2885 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2886 are relative to. */
2887
2888 static struct cgraph_edge *
2889 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2890 struct ipa_jump_func *jfunc,
2891 struct ipa_node_params *new_root_info)
2892 {
2893 tree binfo, target;
2894
2895 if (!flag_devirtualize)
2896 return NULL;
2897
2898 /* First try to do lookup via known virtual table pointer value. */
2899 if (!ie->indirect_info->by_ref)
2900 {
2901 tree vtable;
2902 unsigned HOST_WIDE_INT offset;
2903 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2904 ie->indirect_info->offset,
2905 true);
2906 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2907 {
2908 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2909 vtable, offset);
2910 if (target)
2911 {
2912 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2913 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2914 || !possible_polymorphic_call_target_p
2915 (ie, cgraph_get_node (target)))
2916 {
2917 if (dump_file)
2918 fprintf (dump_file,
2919 "Type inconsident devirtualization: %s/%i->%s\n",
2920 ie->caller->name (), ie->caller->order,
2921 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2922 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2923 cgraph_get_create_node (target);
2924 }
2925 return ipa_make_edge_direct_to_target (ie, target);
2926 }
2927 }
2928 }
2929
2930 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2931
2932 if (!binfo)
2933 return NULL;
2934
2935 if (TREE_CODE (binfo) != TREE_BINFO)
2936 {
2937 ipa_polymorphic_call_context context;
2938 vec <cgraph_node *>targets;
2939 bool final;
2940
2941 if (!get_polymorphic_call_info_from_invariant
2942 (&context, binfo, ie->indirect_info->otr_type,
2943 ie->indirect_info->offset))
2944 return NULL;
2945 targets = possible_polymorphic_call_targets
2946 (ie->indirect_info->otr_type,
2947 ie->indirect_info->otr_token,
2948 context, &final);
2949 if (!final || targets.length () > 1)
2950 return NULL;
2951 if (targets.length () == 1)
2952 target = targets[0]->decl;
2953 else
2954 {
2955 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2956 cgraph_get_create_node (target);
2957 }
2958 }
2959 else
2960 {
2961 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2962 ie->indirect_info->otr_type);
2963 if (binfo)
2964 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2965 binfo);
2966 else
2967 return NULL;
2968 }
2969
2970 if (target)
2971 {
2972 #ifdef ENABLE_CHECKING
2973 gcc_assert (possible_polymorphic_call_target_p
2974 (ie, cgraph_get_node (target)));
2975 #endif
2976 return ipa_make_edge_direct_to_target (ie, target);
2977 }
2978 else
2979 return NULL;
2980 }
2981
2982 /* Update the param called notes associated with NODE when CS is being inlined,
2983 assuming NODE is (potentially indirectly) inlined into CS->callee.
2984 Moreover, if the callee is discovered to be constant, create a new cgraph
2985 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
unless NEW_EDGES is NULL.  Return true iff new edge(s) were created.  */
2987
2988 static bool
2989 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2990 struct cgraph_node *node,
2991 vec<cgraph_edge_p> *new_edges)
2992 {
2993 struct ipa_edge_args *top;
2994 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2995 struct ipa_node_params *new_root_info;
2996 bool res = false;
2997
2998 ipa_check_create_edge_args ();
2999 top = IPA_EDGE_REF (cs);
3000 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3001 ? cs->caller->global.inlined_to
3002 : cs->caller);
3003
3004 for (ie = node->indirect_calls; ie; ie = next_ie)
3005 {
3006 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3007 struct ipa_jump_func *jfunc;
3008 int param_index;
3009
3010 next_ie = ie->next_callee;
3011
3012 if (ici->param_index == -1)
3013 continue;
3014
/* We must check the range because of calls with a variable number of
   arguments.  */
3016 if (ici->param_index >= ipa_get_cs_argument_count (top))
3017 {
3018 ici->param_index = -1;
3019 continue;
3020 }
3021
3022 param_index = ici->param_index;
3023 jfunc = ipa_get_ith_jump_func (top, param_index);
3024
3025 if (!flag_indirect_inlining)
3026 new_direct_edge = NULL;
3027 else if (ici->polymorphic)
3028 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3029 new_root_info);
3030 else
3031 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3032 new_root_info);
3033 /* If speculation was removed, then we need to do nothing. */
3034 if (new_direct_edge && new_direct_edge != ie)
3035 {
3036 new_direct_edge->indirect_inlining_edge = 1;
3037 top = IPA_EDGE_REF (cs);
3038 res = true;
3039 }
3040 else if (new_direct_edge)
3041 {
3042 new_direct_edge->indirect_inlining_edge = 1;
3043 if (new_direct_edge->call_stmt)
3044 new_direct_edge->call_stmt_cannot_inline_p
3045 = !gimple_check_call_matching_types (
3046 new_direct_edge->call_stmt,
3047 new_direct_edge->callee->decl, false);
3048 if (new_edges)
3049 {
3050 new_edges->safe_push (new_direct_edge);
3051 res = true;
3052 }
3053 top = IPA_EDGE_REF (cs);
3054 }
3055 else if (jfunc->type == IPA_JF_PASS_THROUGH
3056 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3057 {
3058 if ((ici->agg_contents
3059 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3060 || (ici->polymorphic
3061 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3062 ici->param_index = -1;
3063 else
3064 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3065 }
3066 else if (jfunc->type == IPA_JF_ANCESTOR)
3067 {
3068 if ((ici->agg_contents
3069 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3070 || (ici->polymorphic
3071 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3072 ici->param_index = -1;
3073 else
3074 {
3075 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3076 if (ipa_get_jf_ancestor_offset (jfunc))
3077 ici->outer_type = NULL;
3078 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3079 }
3080 }
3081 else
3082 /* Either we can find a destination for this edge now or never. */
3083 ici->param_index = -1;
3084 }
3085
3086 return res;
3087 }
3088
3089 /* Recursively traverse subtree of NODE (including node) made of inlined
3090 cgraph_edges when CS has been inlined and invoke
3091 update_indirect_edges_after_inlining on all nodes and
3092 update_jump_functions_after_inlining on all non-inlined edges that lead out
3093 of this subtree. Newly discovered indirect edges will be added to
*NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edge(s) were
created.  */
3096
3097 static bool
3098 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3099 struct cgraph_node *node,
3100 vec<cgraph_edge_p> *new_edges)
3101 {
3102 struct cgraph_edge *e;
3103 bool res;
3104
3105 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3106
3107 for (e = node->callees; e; e = e->next_callee)
3108 if (!e->inline_failed)
3109 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3110 else
3111 update_jump_functions_after_inlining (cs, e);
3112 for (e = node->indirect_calls; e; e = e->next_callee)
3113 update_jump_functions_after_inlining (cs, e);
3114
3115 return res;
3116 }
3117
3118 /* Combine two controlled uses counts as done during inlining. */
3119
3120 static int
3121 combine_controlled_uses_counters (int c, int d)
3122 {
3123 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3124 return IPA_UNDESCRIBED_USE;
3125 else
3126 return c + d - 1;
3127 }
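
/* The minus one above reflects that the inlined call site itself accounted
   for one of the caller's controlled uses.  E.g. (hypothetical counts) with
   c == 2 and d == 3, inlining removes one use and adds three, giving 4.  */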
3128
/* Propagate the number of controlled users from CS->callee to the new root
   of the tree of inlined nodes.  */
3131
3132 static void
3133 propagate_controlled_uses (struct cgraph_edge *cs)
3134 {
3135 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3136 struct cgraph_node *new_root = cs->caller->global.inlined_to
3137 ? cs->caller->global.inlined_to : cs->caller;
3138 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3139 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3140 int count, i;
3141
3142 count = MIN (ipa_get_cs_argument_count (args),
3143 ipa_get_param_count (old_root_info));
3144 for (i = 0; i < count; i++)
3145 {
3146 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3147 struct ipa_cst_ref_desc *rdesc;
3148
3149 if (jf->type == IPA_JF_PASS_THROUGH)
3150 {
3151 int src_idx, c, d;
3152 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3153 c = ipa_get_controlled_uses (new_root_info, src_idx);
3154 d = ipa_get_controlled_uses (old_root_info, i);
3155
3156 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3157 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3158 c = combine_controlled_uses_counters (c, d);
3159 ipa_set_controlled_uses (new_root_info, src_idx, c);
3160 if (c == 0 && new_root_info->ipcp_orig_node)
3161 {
3162 struct cgraph_node *n;
3163 struct ipa_ref *ref;
3164 tree t = new_root_info->known_vals[src_idx];
3165
3166 if (t && TREE_CODE (t) == ADDR_EXPR
3167 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3168 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
3169 && (ref = ipa_find_reference (new_root,
3170 n, NULL, 0)))
3171 {
3172 if (dump_file)
3173 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3174 "reference from %s/%i to %s/%i.\n",
3175 xstrdup (new_root->name ()),
3176 new_root->order,
3177 xstrdup (n->name ()), n->order);
3178 ipa_remove_reference (ref);
3179 }
3180 }
3181 }
3182 else if (jf->type == IPA_JF_CONST
3183 && (rdesc = jfunc_rdesc_usable (jf)))
3184 {
3185 int d = ipa_get_controlled_uses (old_root_info, i);
3186 int c = rdesc->refcount;
3187 rdesc->refcount = combine_controlled_uses_counters (c, d);
3188 if (rdesc->refcount == 0)
3189 {
3190 tree cst = ipa_get_jf_constant (jf);
3191 struct cgraph_node *n;
3192 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3193 && TREE_CODE (TREE_OPERAND (cst, 0))
3194 == FUNCTION_DECL);
3195 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3196 if (n)
3197 {
3198 struct cgraph_node *clone;
3199 bool ok;
3200 ok = remove_described_reference (n, rdesc);
3201 gcc_checking_assert (ok);
3202
3203 clone = cs->caller;
3204 while (clone->global.inlined_to
3205 && clone != rdesc->cs->caller
3206 && IPA_NODE_REF (clone)->ipcp_orig_node)
3207 {
3208 struct ipa_ref *ref;
3209 ref = ipa_find_reference (clone,
3210 n, NULL, 0);
3211 if (ref)
3212 {
3213 if (dump_file)
3214 fprintf (dump_file, "ipa-prop: Removing "
3215 "cloning-created reference "
3216 "from %s/%i to %s/%i.\n",
3217 xstrdup (clone->name ()),
3218 clone->order,
3219 xstrdup (n->name ()),
3220 n->order);
3221 ipa_remove_reference (ref);
3222 }
3223 clone = clone->callers->caller;
3224 }
3225 }
3226 }
3227 }
3228 }
3229
3230 for (i = ipa_get_param_count (old_root_info);
3231 i < ipa_get_cs_argument_count (args);
3232 i++)
3233 {
3234 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3235
3236 if (jf->type == IPA_JF_CONST)
3237 {
3238 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3239 if (rdesc)
3240 rdesc->refcount = IPA_UNDESCRIBED_USE;
3241 }
3242 else if (jf->type == IPA_JF_PASS_THROUGH)
3243 ipa_set_controlled_uses (new_root_info,
3244 jf->value.pass_through.formal_id,
3245 IPA_UNDESCRIBED_USE);
3246 }
3247 }
3248
3249 /* Update jump functions and call note functions on inlining the call site CS.
3250 CS is expected to lead to a node already cloned by
3251 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
*NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edge(s) were
created.  */
3254
3255 bool
3256 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3257 vec<cgraph_edge_p> *new_edges)
3258 {
3259 bool changed;
3260 /* Do nothing if the preparation phase has not been carried out yet
3261 (i.e. during early inlining). */
3262 if (!ipa_node_params_vector.exists ())
3263 return false;
3264 gcc_assert (ipa_edge_args_vector);
3265
3266 propagate_controlled_uses (cs);
3267 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3268
3269 return changed;
3270 }
3271
3272 /* Frees all dynamically allocated structures that the argument info points
3273 to. */
3274
3275 void
3276 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3277 {
3278 vec_free (args->jump_functions);
3279 memset (args, 0, sizeof (*args));
3280 }
3281
3282 /* Free all ipa_edge structures. */
3283
3284 void
3285 ipa_free_all_edge_args (void)
3286 {
3287 int i;
3288 struct ipa_edge_args *args;
3289
3290 if (!ipa_edge_args_vector)
3291 return;
3292
3293 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3294 ipa_free_edge_args_substructures (args);
3295
3296 vec_free (ipa_edge_args_vector);
3297 }
3298
3299 /* Frees all dynamically allocated structures that the param info points
3300 to. */
3301
3302 void
3303 ipa_free_node_params_substructures (struct ipa_node_params *info)
3304 {
3305 info->descriptors.release ();
3306 free (info->lattices);
/* Lattice values and their sources are deallocated with their allocation
   pool.  */
3309 info->known_vals.release ();
3310 memset (info, 0, sizeof (*info));
3311 }
3312
3313 /* Free all ipa_node_params structures. */
3314
3315 void
3316 ipa_free_all_node_params (void)
3317 {
3318 int i;
3319 struct ipa_node_params *info;
3320
3321 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3322 ipa_free_node_params_substructures (info);
3323
3324 ipa_node_params_vector.release ();
3325 }
3326
3327 /* Set the aggregate replacements of NODE to be AGGVALS. */
3328
3329 void
3330 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3331 struct ipa_agg_replacement_value *aggvals)
3332 {
3333 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3334 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3335
3336 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3337 }
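
/* The replacement chains above are indexed by cgraph node uid, which is why
   the vector is grown to cgraph_max_uid + 1 before storing.  */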
3338
3339 /* Hook that is called by cgraph.c when an edge is removed. */
3340
3341 static void
3342 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3343 {
3344 struct ipa_edge_args *args;
3345
3346 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3347 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3348 return;
3349
3350 args = IPA_EDGE_REF (cs);
3351 if (args->jump_functions)
3352 {
3353 struct ipa_jump_func *jf;
3354 int i;
3355 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3356 {
3357 struct ipa_cst_ref_desc *rdesc;
3358 try_decrement_rdesc_refcount (jf);
3359 if (jf->type == IPA_JF_CONST
3360 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3361 && rdesc->cs == cs)
3362 rdesc->cs = NULL;
3363 }
3364 }
3365
3366 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3367 }
3368
3369 /* Hook that is called by cgraph.c when a node is removed. */
3370
3371 static void
3372 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3373 {
/* During IPA-CP updating we can be called on not-yet analyzed clones.  */
3375 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3376 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3377 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3378 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3379 }
3380
3381 /* Hook that is called by cgraph.c when an edge is duplicated. */
3382
3383 static void
3384 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3385 __attribute__((unused)) void *data)
3386 {
3387 struct ipa_edge_args *old_args, *new_args;
3388 unsigned int i;
3389
3390 ipa_check_create_edge_args ();
3391
3392 old_args = IPA_EDGE_REF (src);
3393 new_args = IPA_EDGE_REF (dst);
3394
3395 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3396
3397 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3398 {
3399 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3400 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3401
3402 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3403
3404 if (src_jf->type == IPA_JF_CONST)
3405 {
3406 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3407
3408 if (!src_rdesc)
3409 dst_jf->value.constant.rdesc = NULL;
3410 else if (src->caller == dst->caller)
3411 {
3412 struct ipa_ref *ref;
3413 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3414 gcc_checking_assert (n);
3415 ref = ipa_find_reference (src->caller, n,
3416 src->call_stmt, src->lto_stmt_uid);
3417 gcc_checking_assert (ref);
3418 ipa_clone_ref (ref, dst->caller, ref->stmt);
3419
3420 gcc_checking_assert (ipa_refdesc_pool);
3421 struct ipa_cst_ref_desc *dst_rdesc
3422 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3423 dst_rdesc->cs = dst;
3424 dst_rdesc->refcount = src_rdesc->refcount;
3425 dst_rdesc->next_duplicate = NULL;
3426 dst_jf->value.constant.rdesc = dst_rdesc;
3427 }
3428 else if (src_rdesc->cs == src)
3429 {
3430 struct ipa_cst_ref_desc *dst_rdesc;
3431 gcc_checking_assert (ipa_refdesc_pool);
3432 dst_rdesc
3433 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3434 dst_rdesc->cs = dst;
3435 dst_rdesc->refcount = src_rdesc->refcount;
3436 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3437 src_rdesc->next_duplicate = dst_rdesc;
3438 dst_jf->value.constant.rdesc = dst_rdesc;
3439 }
3440 else
3441 {
3442 struct ipa_cst_ref_desc *dst_rdesc;
3443 /* This can happen during inlining, when a JFUNC can refer to a
3444 reference taken in a function up in the tree of inline clones.
3445 We need to find the duplicate that refers to our tree of
3446 inline clones. */
3447
3448 gcc_assert (dst->caller->global.inlined_to);
3449 for (dst_rdesc = src_rdesc->next_duplicate;
3450 dst_rdesc;
3451 dst_rdesc = dst_rdesc->next_duplicate)
3452 {
3453 struct cgraph_node *top;
3454 top = dst_rdesc->cs->caller->global.inlined_to
3455 ? dst_rdesc->cs->caller->global.inlined_to
3456 : dst_rdesc->cs->caller;
3457 if (dst->caller->global.inlined_to == top)
3458 break;
3459 }
3460 gcc_assert (dst_rdesc);
3461 dst_jf->value.constant.rdesc = dst_rdesc;
3462 }
3463 }
3464 }
3465 }
3466
3467 /* Hook that is called by cgraph.c when a node is duplicated. */
3468
3469 static void
3470 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3471 ATTRIBUTE_UNUSED void *data)
3472 {
3473 struct ipa_node_params *old_info, *new_info;
3474 struct ipa_agg_replacement_value *old_av, *new_av;
3475
3476 ipa_check_create_node_params ();
3477 old_info = IPA_NODE_REF (src);
3478 new_info = IPA_NODE_REF (dst);
3479
3480 new_info->descriptors = old_info->descriptors.copy ();
3481 new_info->lattices = NULL;
3482 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3483
3484 new_info->analysis_done = old_info->analysis_done;
3485 new_info->node_enqueued = old_info->node_enqueued;
3486
3487 old_av = ipa_get_agg_replacements_for_node (src);
3488 if (!old_av)
3489 return;
3490
3491 new_av = NULL;
3492 while (old_av)
3493 {
3494 struct ipa_agg_replacement_value *v;
3495
3496 v = ggc_alloc<ipa_agg_replacement_value> ();
3497 memcpy (v, old_av, sizeof (*v));
3498 v->next = new_av;
3499 new_av = v;
3500 old_av = old_av->next;
3501 }
3502 ipa_set_node_agg_value_chain (dst, new_av);
3503 }
3504
3505
/* Analyze a function newly added to the callgraph.  */
3507
3508 static void
3509 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3510 {
3511 if (cgraph_function_with_gimple_body_p (node))
3512 ipa_analyze_node (node);
3513 }
3514
3515 /* Register our cgraph hooks if they are not already there. */
3516
3517 void
3518 ipa_register_cgraph_hooks (void)
3519 {
3520 if (!edge_removal_hook_holder)
3521 edge_removal_hook_holder =
3522 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3523 if (!node_removal_hook_holder)
3524 node_removal_hook_holder =
3525 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3526 if (!edge_duplication_hook_holder)
3527 edge_duplication_hook_holder =
3528 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3529 if (!node_duplication_hook_holder)
3530 node_duplication_hook_holder =
3531 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3532 function_insertion_hook_holder =
3533 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3534 }
3535
/* Unregister our cgraph hooks.  */
3537
3538 static void
3539 ipa_unregister_cgraph_hooks (void)
3540 {
3541 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3542 edge_removal_hook_holder = NULL;
3543 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3544 node_removal_hook_holder = NULL;
3545 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3546 edge_duplication_hook_holder = NULL;
3547 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3548 node_duplication_hook_holder = NULL;
3549 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3550 function_insertion_hook_holder = NULL;
3551 }
3552
3553 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3554 longer needed after ipa-cp. */
3555
3556 void
3557 ipa_free_all_structures_after_ipa_cp (void)
3558 {
3559 if (!optimize)
3560 {
3561 ipa_free_all_edge_args ();
3562 ipa_free_all_node_params ();
3563 free_alloc_pool (ipcp_sources_pool);
3564 free_alloc_pool (ipcp_values_pool);
3565 free_alloc_pool (ipcp_agg_lattice_pool);
3566 ipa_unregister_cgraph_hooks ();
3567 if (ipa_refdesc_pool)
3568 free_alloc_pool (ipa_refdesc_pool);
3569 }
3570 }
3571
3572 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3573 longer needed after indirect inlining. */
3574
3575 void
3576 ipa_free_all_structures_after_iinln (void)
3577 {
3578 ipa_free_all_edge_args ();
3579 ipa_free_all_node_params ();
3580 ipa_unregister_cgraph_hooks ();
3581 if (ipcp_sources_pool)
3582 free_alloc_pool (ipcp_sources_pool);
3583 if (ipcp_values_pool)
3584 free_alloc_pool (ipcp_values_pool);
3585 if (ipcp_agg_lattice_pool)
3586 free_alloc_pool (ipcp_agg_lattice_pool);
3587 if (ipa_refdesc_pool)
3588 free_alloc_pool (ipa_refdesc_pool);
3589 }
3590
3591 /* Print ipa_tree_map data structures of function NODE to F.  */
3593
3594 void
3595 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3596 {
3597 int i, count;
3598 struct ipa_node_params *info;
3599
3600 if (!node->definition)
3601 return;
3602 info = IPA_NODE_REF (node);
3603 fprintf (f, " function %s/%i parameter descriptors:\n",
3604 node->name (), node->order);
3605 count = ipa_get_param_count (info);
3606 for (i = 0; i < count; i++)
3607 {
3608 int c;
3609
3610 fprintf (f, " ");
3611 ipa_dump_param (f, info, i);
3612 if (ipa_is_param_used (info, i))
3613 fprintf (f, " used");
3614 c = ipa_get_controlled_uses (info, i);
3615 if (c == IPA_UNDESCRIBED_USE)
3616 fprintf (f, " undescribed_use");
3617 else
3618 fprintf (f, " controlled_uses=%i", c);
3619 fprintf (f, "\n");
3620 }
3621 }
3622
3623 /* Print ipa_tree_map data structures of all functions in the
3624 callgraph to F. */
3625
3626 void
3627 ipa_print_all_params (FILE * f)
3628 {
3629 struct cgraph_node *node;
3630
3631 fprintf (f, "\nFunction parameters:\n");
3632 FOR_EACH_FUNCTION (node)
3633 ipa_print_node_params (f, node);
3634 }
3635
3636 /* Return a heap-allocated vector containing formal parameters of FNDECL.  */
3637
3638 vec<tree>
3639 ipa_get_vector_of_formal_parms (tree fndecl)
3640 {
3641 vec<tree> args;
3642 int count;
3643 tree parm;
3644
3645 gcc_assert (!flag_wpa);
3646 count = count_formal_params (fndecl);
3647 args.create (count);
3648 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3649 args.quick_push (parm);
3650
3651 return args;
3652 }
3653
3654 /* Return a heap-allocated vector containing the types of formal parameters
3655 of function type FNTYPE.  */
3656
3657 vec<tree>
3658 ipa_get_vector_of_formal_parm_types (tree fntype)
3659 {
3660 vec<tree> types;
3661 int count = 0;
3662 tree t;
3663
3664 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3665 count++;
3666
3667 types.create (count);
3668 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3669 types.quick_push (TREE_VALUE (t));
3670
3671 return types;
3672 }
3673
3674 /* Modify the function declaration FNDECL and its type according to the plan
3675 in ADJUSTMENTS.  It also sets the base fields of individual adjustment
3676 structures to reflect the actual parameters being modified, which are
3677 determined by the base_index field.  */
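/* For example (a hypothetical declaration, not taken from a testcase), given

     void foo (int a, struct S *s);

   an ADJUSTMENTS vector whose first element copies parameter 0 and whose
   second element removes parameter 1 rewrites both DECL_ARGUMENTS and
   TYPE_ARG_TYPES so that the declaration reads

     void foo (int a);  */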
3678
3679 void
3680 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3681 {
3682 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3683 tree orig_type = TREE_TYPE (fndecl);
3684 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3685
3686 /* The following test is an ugly hack; some functions simply don't have
3687 any arguments in their type (e.g. old-style declarations without a
prototype).  This is probably a bug, but we have to cope with it.  */
3688 bool care_for_types = (old_arg_types != NULL_TREE);
3689 bool last_parm_void;
3690 vec<tree> otypes;
3691 if (care_for_types)
3692 {
3693 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3694 == void_type_node);
3695 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3696 if (last_parm_void)
3697 gcc_assert (oparms.length () + 1 == otypes.length ());
3698 else
3699 gcc_assert (oparms.length () == otypes.length ());
3700 }
3701 else
3702 {
3703 last_parm_void = false;
3704 otypes.create (0);
3705 }
3706
3707 int len = adjustments.length ();
3708 tree *link = &DECL_ARGUMENTS (fndecl);
3709 tree new_arg_types = NULL;
3710 for (int i = 0; i < len; i++)
3711 {
3712 struct ipa_parm_adjustment *adj;
3713 gcc_assert (link);
3714
3715 adj = &adjustments[i];
3716 tree parm;
3717 if (adj->op == IPA_PARM_OP_NEW)
3718 parm = NULL;
3719 else
3720 parm = oparms[adj->base_index];
3721 adj->base = parm;
3722
3723 if (adj->op == IPA_PARM_OP_COPY)
3724 {
3725 if (care_for_types)
3726 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3727 new_arg_types);
3728 *link = parm;
3729 link = &DECL_CHAIN (parm);
3730 }
3731 else if (adj->op != IPA_PARM_OP_REMOVE)
3732 {
3733 tree new_parm;
3734 tree ptype;
3735
3736 if (adj->by_ref)
3737 ptype = build_pointer_type (adj->type);
3738 else
3739 {
3740 ptype = adj->type;
3741 if (is_gimple_reg_type (ptype))
3742 {
3743 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3744 if (TYPE_ALIGN (ptype) < malign)
3745 ptype = build_aligned_type (ptype, malign);
3746 }
3747 }
3748
3749 if (care_for_types)
3750 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3751
3752 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3753 ptype);
3754 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3755 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3756 DECL_ARTIFICIAL (new_parm) = 1;
3757 DECL_ARG_TYPE (new_parm) = ptype;
3758 DECL_CONTEXT (new_parm) = fndecl;
3759 TREE_USED (new_parm) = 1;
3760 DECL_IGNORED_P (new_parm) = 1;
3761 layout_decl (new_parm, 0);
3762
3763 if (adj->op == IPA_PARM_OP_NEW)
3764 adj->base = NULL;
3765 else
3766 adj->base = parm;
3767 adj->new_decl = new_parm;
3768
3769 *link = new_parm;
3770 link = &DECL_CHAIN (new_parm);
3771 }
3772 }
3773
3774 *link = NULL_TREE;
3775
3776 tree new_reversed = NULL;
3777 if (care_for_types)
3778 {
3779 new_reversed = nreverse (new_arg_types);
3780 if (last_parm_void)
3781 {
3782 if (new_reversed)
3783 TREE_CHAIN (new_arg_types) = void_list_node;
3784 else
3785 new_reversed = void_list_node;
3786 }
3787 }
3788
3789 /* Use copy_node to preserve as much as possible from the original type
3790 (debug info, attribute lists etc.).
3791 The exception is METHOD_TYPEs, which must have the THIS argument;
3792 when we are asked to remove it, we need to build a new FUNCTION_TYPE
3793 instead.  */
3794 tree new_type = NULL;
3795 if (TREE_CODE (orig_type) != METHOD_TYPE
3796 || (adjustments[0].op == IPA_PARM_OP_COPY
3797 && adjustments[0].base_index == 0))
3798 {
3799 new_type = build_distinct_type_copy (orig_type);
3800 TYPE_ARG_TYPES (new_type) = new_reversed;
3801 }
3802 else
3803 {
3804 new_type
3805 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3806 new_reversed));
3807 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3808 DECL_VINDEX (fndecl) = NULL_TREE;
3809 }
3810
3811 /* When the signature changes, we need to clear builtin info.  */
3812 if (DECL_BUILT_IN (fndecl))
3813 {
3814 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3815 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3816 }
3817
3818 /* This is a new type, not a copy of an old type.  We need to reassociate
3819 variants; we can handle everything except the main variant lazily.  */
3820 tree t = TYPE_MAIN_VARIANT (orig_type);
3821 if (orig_type != t)
3822 {
3823 TYPE_MAIN_VARIANT (new_type) = t;
3824 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3825 TYPE_NEXT_VARIANT (t) = new_type;
3826 }
3827 else
3828 {
3829 TYPE_MAIN_VARIANT (new_type) = new_type;
3830 TYPE_NEXT_VARIANT (new_type) = NULL;
3831 }
3832
3833 TREE_TYPE (fndecl) = new_type;
3834 DECL_VIRTUAL_P (fndecl) = 0;
3835 DECL_LANG_SPECIFIC (fndecl) = NULL;
3836 otypes.release ();
3837 oparms.release ();
3838 }
3839
3840 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3841 If this is a directly recursive call, CS must be NULL. Otherwise it must
3842 contain the corresponding call graph edge. */
3843
3844 void
3845 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3846 ipa_parm_adjustment_vec adjustments)
3847 {
3848 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3849 vec<tree> vargs;
3850 vec<tree, va_gc> **debug_args = NULL;
3851 gimple new_stmt;
3852 gimple_stmt_iterator gsi, prev_gsi;
3853 tree callee_decl;
3854 int i, len;
3855
3856 len = adjustments.length ();
3857 vargs.create (len);
3858 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3859 ipa_remove_stmt_references (current_node, stmt);
3860
3861 gsi = gsi_for_stmt (stmt);
3862 prev_gsi = gsi;
3863 gsi_prev (&prev_gsi);
3864 for (i = 0; i < len; i++)
3865 {
3866 struct ipa_parm_adjustment *adj;
3867
3868 adj = &adjustments[i];
3869
3870 if (adj->op == IPA_PARM_OP_COPY)
3871 {
3872 tree arg = gimple_call_arg (stmt, adj->base_index);
3873
3874 vargs.quick_push (arg);
3875 }
3876 else if (adj->op != IPA_PARM_OP_REMOVE)
3877 {
3878 tree expr, base, off;
3879 location_t loc;
3880 unsigned int deref_align = 0;
3881 bool deref_base = false;
3882
3883 /* Because we create a new parameter out of the value of the old one, we
3884 can perform the following kinds of transformations:
3885
3886 - A scalar passed by reference is converted to a scalar passed by
3887 value. (adj->by_ref is false and the type of the original
3888 actual argument is a pointer to a scalar).
3889
3890 - A part of an aggregate is passed instead of the whole aggregate.
3891 The part can be passed either by value or by reference, this is
3892 determined by value of adj->by_ref. Moreover, the code below
3893 handles both situations when the original aggregate is passed by
3894 value (its type is not a pointer) and when it is passed by
3895 reference (it is a pointer to an aggregate).
3896
3897 When the new argument is passed by reference (adj->by_ref is true)
3898 it must be a part of an aggregate and therefore we form it by
3899 simply taking the address of a reference inside the original
3900 aggregate. */
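/* A hedged sketch with a hypothetical caller: converting a scalar from
   by-reference to by-value turns

     foo (&s.x);

   into

     tmp_1 = s.x;
     foo (tmp_1);

   while passing a part of an aggregate by reference instead of the whole
   aggregate turns foo (&s) into foo (&s.x).  */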
3901
3902 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3903 base = gimple_call_arg (stmt, adj->base_index);
3904 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3905 : EXPR_LOCATION (base);
3906
3907 if (TREE_CODE (base) != ADDR_EXPR
3908 && POINTER_TYPE_P (TREE_TYPE (base)))
3909 off = build_int_cst (adj->alias_ptr_type,
3910 adj->offset / BITS_PER_UNIT);
3911 else
3912 {
3913 HOST_WIDE_INT base_offset;
3914 tree prev_base;
3915 bool addrof;
3916
3917 if (TREE_CODE (base) == ADDR_EXPR)
3918 {
3919 base = TREE_OPERAND (base, 0);
3920 addrof = true;
3921 }
3922 else
3923 addrof = false;
3924 prev_base = base;
3925 base = get_addr_base_and_unit_offset (base, &base_offset);
3926 /* Aggregate arguments can have non-invariant addresses. */
3927 if (!base)
3928 {
3929 base = build_fold_addr_expr (prev_base);
3930 off = build_int_cst (adj->alias_ptr_type,
3931 adj->offset / BITS_PER_UNIT);
3932 }
3933 else if (TREE_CODE (base) == MEM_REF)
3934 {
3935 if (!addrof)
3936 {
3937 deref_base = true;
3938 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3939 }
3940 off = build_int_cst (adj->alias_ptr_type,
3941 base_offset
3942 + adj->offset / BITS_PER_UNIT);
3943 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3944 off);
3945 base = TREE_OPERAND (base, 0);
3946 }
3947 else
3948 {
3949 off = build_int_cst (adj->alias_ptr_type,
3950 base_offset
3951 + adj->offset / BITS_PER_UNIT);
3952 base = build_fold_addr_expr (base);
3953 }
3954 }
3955
3956 if (!adj->by_ref)
3957 {
3958 tree type = adj->type;
3959 unsigned int align;
3960 unsigned HOST_WIDE_INT misalign;
3961
3962 if (deref_base)
3963 {
3964 align = deref_align;
3965 misalign = 0;
3966 }
3967 else
3968 {
3969 get_pointer_alignment_1 (base, &align, &misalign);
3970 if (TYPE_ALIGN (type) > align)
3971 align = TYPE_ALIGN (type);
3972 }
3973 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3974 * BITS_PER_UNIT);
3975 misalign = misalign & (align - 1);
3976 if (misalign != 0)
3977 align = (misalign & -misalign);
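/* Worked example: a pointer known to be 256-bit aligned accessed with a
   4 byte offset yields misalign == 32 bits here, so only 32-bit alignment
   can be guaranteed for the new access.  */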
3978 if (align < TYPE_ALIGN (type))
3979 type = build_aligned_type (type, align);
3980 base = force_gimple_operand_gsi (&gsi, base,
3981 true, NULL, true, GSI_SAME_STMT);
3982 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3983 /* If EXPR is not a valid gimple call argument, emit
3984 a load into a temporary.  */
3985 if (is_gimple_reg_type (TREE_TYPE (expr)))
3986 {
3987 gimple tem = gimple_build_assign (NULL_TREE, expr);
3988 if (gimple_in_ssa_p (cfun))
3989 {
3990 gimple_set_vuse (tem, gimple_vuse (stmt));
3991 expr = make_ssa_name (TREE_TYPE (expr), tem);
3992 }
3993 else
3994 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
3995 gimple_assign_set_lhs (tem, expr);
3996 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
3997 }
3998 }
3999 else
4000 {
4001 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4002 expr = build_fold_addr_expr (expr);
4003 expr = force_gimple_operand_gsi (&gsi, expr,
4004 true, NULL, true, GSI_SAME_STMT);
4005 }
4006 vargs.quick_push (expr);
4007 }
4008 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4009 {
4010 unsigned int ix;
4011 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4012 gimple def_temp;
4013
4014 arg = gimple_call_arg (stmt, adj->base_index);
4015 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4016 {
4017 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4018 continue;
4019 arg = fold_convert_loc (gimple_location (stmt),
4020 TREE_TYPE (origin), arg);
4021 }
4022 if (debug_args == NULL)
4023 debug_args = decl_debug_args_insert (callee_decl);
4024 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4025 if (ddecl == origin)
4026 {
4027 ddecl = (**debug_args)[ix + 1];
4028 break;
4029 }
4030 if (ddecl == NULL)
4031 {
4032 ddecl = make_node (DEBUG_EXPR_DECL);
4033 DECL_ARTIFICIAL (ddecl) = 1;
4034 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4035 DECL_MODE (ddecl) = DECL_MODE (origin);
4036
4037 vec_safe_push (*debug_args, origin);
4038 vec_safe_push (*debug_args, ddecl);
4039 }
4040 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4041 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4042 }
4043 }
4044
4045 if (dump_file && (dump_flags & TDF_DETAILS))
4046 {
4047 fprintf (dump_file, "replacing stmt:");
4048 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4049 }
4050
4051 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4052 vargs.release ();
4053 if (gimple_call_lhs (stmt))
4054 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4055
4056 gimple_set_block (new_stmt, gimple_block (stmt));
4057 if (gimple_has_location (stmt))
4058 gimple_set_location (new_stmt, gimple_location (stmt));
4059 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4060 gimple_call_copy_flags (new_stmt, stmt);
4061 if (gimple_in_ssa_p (cfun))
4062 {
4063 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4064 if (gimple_vdef (stmt))
4065 {
4066 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4067 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4068 }
4069 }
4070
4071 if (dump_file && (dump_flags & TDF_DETAILS))
4072 {
4073 fprintf (dump_file, "with stmt:");
4074 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4075 fprintf (dump_file, "\n");
4076 }
4077 gsi_replace (&gsi, new_stmt, true);
4078 if (cs)
4079 cgraph_set_call_stmt (cs, new_stmt);
4080 do
4081 {
4082 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
4083 gsi_prev (&gsi);
4084 }
4085 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4086 }
4087
4088 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4089 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4090 specifies whether the function should care about type incompatibility between the
4091 current and new expressions. If it is false, the function will leave
4092 incompatibility issues to the caller. Return true iff the expression
4093 was modified. */
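/* For instance (hypothetical names), if a component of parameter P at a
   given offset was turned into a new scalar parameter P$val, an occurrence
   of that component in the body is replaced by P$val directly, or by
   *P$val when the new parameter is passed by reference.  */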
4094
4095 bool
4096 ipa_modify_expr (tree *expr, bool convert,
4097 ipa_parm_adjustment_vec adjustments)
4098 {
4099 struct ipa_parm_adjustment *cand
4100 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4101 if (!cand)
4102 return false;
4103
4104 tree src;
4105 if (cand->by_ref)
4106 src = build_simple_mem_ref (cand->new_decl);
4107 else
4108 src = cand->new_decl;
4109
4110 if (dump_file && (dump_flags & TDF_DETAILS))
4111 {
4112 fprintf (dump_file, "About to replace expr ");
4113 print_generic_expr (dump_file, *expr, 0);
4114 fprintf (dump_file, " with ");
4115 print_generic_expr (dump_file, src, 0);
4116 fprintf (dump_file, "\n");
4117 }
4118
4119 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4120 {
4121 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4122 *expr = vce;
4123 }
4124 else
4125 *expr = src;
4126 return true;
4127 }
4128
4129 /* If T is an SSA_NAME, return NULL_TREE if it is not a default def, or
4130 its base variable if it is.  If IGNORE_DEFAULT_DEF is true, the base
4131 variable is returned regardless of whether it is a default def.
4132 Return T if it is not an SSA_NAME.  */
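/* E.g. for p_1(D), the default definition of PARM_DECL p, this returns p;
   for any other SSA name it returns NULL_TREE unless IGNORE_DEFAULT_DEF
   is true.  */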
4133
4134 static tree
4135 get_ssa_base_param (tree t, bool ignore_default_def)
4136 {
4137 if (TREE_CODE (t) == SSA_NAME)
4138 {
4139 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4140 return SSA_NAME_VAR (t);
4141 else
4142 return NULL_TREE;
4143 }
4144 return t;
4145 }
4146
4147 /* Given an expression, return an adjustment entry specifying the
4148 transformation to be done on EXPR.  If no suitable adjustment entry
4149 is found, return NULL.
4150
4151 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4152 default def, otherwise bail on them.
4153
4154 If CONVERT is non-NULL, this function will set *CONVERT if the
4155 expression provided is a component reference. ADJUSTMENTS is the
4156 adjustments vector. */
4157
4158 ipa_parm_adjustment *
4159 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4160 ipa_parm_adjustment_vec adjustments,
4161 bool ignore_default_def)
4162 {
4163 if (TREE_CODE (**expr) == BIT_FIELD_REF
4164 || TREE_CODE (**expr) == IMAGPART_EXPR
4165 || TREE_CODE (**expr) == REALPART_EXPR)
4166 {
4167 *expr = &TREE_OPERAND (**expr, 0);
4168 if (convert)
4169 *convert = true;
4170 }
4171
4172 HOST_WIDE_INT offset, size, max_size;
4173 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4174 if (!base || size == -1 || max_size == -1)
4175 return NULL;
4176
4177 if (TREE_CODE (base) == MEM_REF)
4178 {
4179 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4180 base = TREE_OPERAND (base, 0);
4181 }
4182
4183 base = get_ssa_base_param (base, ignore_default_def);
4184 if (!base || TREE_CODE (base) != PARM_DECL)
4185 return NULL;
4186
4187 struct ipa_parm_adjustment *cand = NULL;
4188 unsigned int len = adjustments.length ();
4189 for (unsigned i = 0; i < len; i++)
4190 {
4191 struct ipa_parm_adjustment *adj = &adjustments[i];
4192
4193 if (adj->base == base
4194 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4195 {
4196 cand = adj;
4197 break;
4198 }
4199 }
4200
4201 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4202 return NULL;
4203 return cand;
4204 }
4205
4206 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4207
4208 static bool
4209 index_in_adjustments_multiple_times_p (int base_index,
4210 ipa_parm_adjustment_vec adjustments)
4211 {
4212 int i, len = adjustments.length ();
4213 bool one = false;
4214
4215 for (i = 0; i < len; i++)
4216 {
4217 struct ipa_parm_adjustment *adj;
4218 adj = &adjustments[i];
4219
4220 if (adj->base_index == base_index)
4221 {
4222 if (one)
4223 return true;
4224 else
4225 one = true;
4226 }
4227 }
4228 return false;
4229 }
4230
4231
4232 /* Return adjustments that should have the same effect on function parameters
4233 and call arguments as if they were first changed according to adjustments in
4234 INNER and then by adjustments in OUTER. */
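/* A small worked example (a hedged sketch, not taken from a testcase):
   suppose INNER copies original parameter 0, removes original parameter 1
   and copies original parameter 2, and OUTER then removes what INNER made
   the second remaining parameter.  The combined vector copies original
   parameter 0, records the removal requested by OUTER and carries over
   the removal of original parameter 1 from INNER.  */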
4235
4236 ipa_parm_adjustment_vec
4237 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4238 ipa_parm_adjustment_vec outer)
4239 {
4240 int i, outlen = outer.length ();
4241 int inlen = inner.length ();
4242 int removals = 0;
4243 ipa_parm_adjustment_vec adjustments, tmp;
4244
4245 tmp.create (inlen);
4246 for (i = 0; i < inlen; i++)
4247 {
4248 struct ipa_parm_adjustment *n;
4249 n = &inner[i];
4250
4251 if (n->op == IPA_PARM_OP_REMOVE)
4252 removals++;
4253 else
4254 {
4255 /* FIXME: Handling of new arguments is not implemented yet.  */
4256 gcc_assert (n->op != IPA_PARM_OP_NEW);
4257 tmp.quick_push (*n);
4258 }
4259 }
4260
4261 adjustments.create (outlen + removals);
4262 for (i = 0; i < outlen; i++)
4263 {
4264 struct ipa_parm_adjustment r;
4265 struct ipa_parm_adjustment *out = &outer[i];
4266 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4267
4268 memset (&r, 0, sizeof (r));
4269 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4270 if (out->op == IPA_PARM_OP_REMOVE)
4271 {
4272 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4273 {
4274 r.op = IPA_PARM_OP_REMOVE;
4275 adjustments.quick_push (r);
4276 }
4277 continue;
4278 }
4279 else
4280 {
4281 /* FIXME: Handling of new arguments is not implemented yet.  */
4282 gcc_assert (out->op != IPA_PARM_OP_NEW);
4283 }
4284
4285 r.base_index = in->base_index;
4286 r.type = out->type;
4287
4288 /* FIXME: Create nonlocal value too. */
4289
4290 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4291 r.op = IPA_PARM_OP_COPY;
4292 else if (in->op == IPA_PARM_OP_COPY)
4293 r.offset = out->offset;
4294 else if (out->op == IPA_PARM_OP_COPY)
4295 r.offset = in->offset;
4296 else
4297 r.offset = in->offset + out->offset;
4298 adjustments.quick_push (r);
4299 }
4300
4301 for (i = 0; i < inlen; i++)
4302 {
4303 struct ipa_parm_adjustment *n = &inner[i];
4304
4305 if (n->op == IPA_PARM_OP_REMOVE)
4306 adjustments.quick_push (*n);
4307 }
4308
4309 tmp.release ();
4310 return adjustments;
4311 }
4312
4313 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
4314 friendly way, assuming they are meant to be applied to FNDECL.  */
4315
4316 void
4317 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4318 tree fndecl)
4319 {
4320 int i, len = adjustments.length ();
4321 bool first = true;
4322 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4323
4324 fprintf (file, "IPA param adjustments: ");
4325 for (i = 0; i < len; i++)
4326 {
4327 struct ipa_parm_adjustment *adj;
4328 adj = &adjustments[i];
4329
4330 if (!first)
4331 fprintf (file, " ");
4332 else
4333 first = false;
4334
4335 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4336 print_generic_expr (file, parms[adj->base_index], 0);
4337 if (adj->base)
4338 {
4339 fprintf (file, ", base: ");
4340 print_generic_expr (file, adj->base, 0);
4341 }
4342 if (adj->new_decl)
4343 {
4344 fprintf (file, ", new_decl: ");
4345 print_generic_expr (file, adj->new_decl, 0);
4346 }
4347 if (adj->new_ssa_base)
4348 {
4349 fprintf (file, ", new_ssa_base: ");
4350 print_generic_expr (file, adj->new_ssa_base, 0);
4351 }
4352
4353 if (adj->op == IPA_PARM_OP_COPY)
4354 fprintf (file, ", copy_param");
4355 else if (adj->op == IPA_PARM_OP_REMOVE)
4356 fprintf (file, ", remove_param");
4357 else
4358 fprintf (file, ", offset %li", (long) adj->offset);
4359 if (adj->by_ref)
4360 fprintf (file, ", by_ref");
4361 print_node_brief (file, ", type: ", adj->type, 0);
4362 fprintf (file, "\n");
4363 }
4364 parms.release ();
4365 }
4366
4367 /* Dump the linked list of aggregate replacement values AV to F.  */
4368
4369 void
4370 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4371 {
4372 bool comma = false;
4373 fprintf (f, " Aggregate replacements:");
4374 for (; av; av = av->next)
4375 {
4376 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4377 av->index, av->offset);
4378 print_generic_expr (f, av->value, 0);
4379 comma = true;
4380 }
4381 fprintf (f, "\n");
4382 }
4383
4384 /* Stream out jump function JUMP_FUNC to OB. */
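/* The record streamed below consists of the jump function type as a uhwi,
   a type-specific payload (for instance, a pass-through streams the
   operation, and for NOP_EXPR also the formal_id and a bitpack of the
   agg_preserved and type_preserved flags), the number of aggregate items,
   a bitpack of the by_ref flag when that number is nonzero, and finally
   each item's offset and value tree.  */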
4385
4386 static void
4387 ipa_write_jump_function (struct output_block *ob,
4388 struct ipa_jump_func *jump_func)
4389 {
4390 struct ipa_agg_jf_item *item;
4391 struct bitpack_d bp;
4392 int i, count;
4393
4394 streamer_write_uhwi (ob, jump_func->type);
4395 switch (jump_func->type)
4396 {
4397 case IPA_JF_UNKNOWN:
4398 break;
4399 case IPA_JF_KNOWN_TYPE:
4400 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4401 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4402 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4403 break;
4404 case IPA_JF_CONST:
4405 gcc_assert (
4406 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4407 stream_write_tree (ob, jump_func->value.constant.value, true);
4408 break;
4409 case IPA_JF_PASS_THROUGH:
4410 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4411 if (jump_func->value.pass_through.operation == NOP_EXPR)
4412 {
4413 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4414 bp = bitpack_create (ob->main_stream);
4415 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4416 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4417 streamer_write_bitpack (&bp);
4418 }
4419 else
4420 {
4421 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4422 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4423 }
4424 break;
4425 case IPA_JF_ANCESTOR:
4426 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4427 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4428 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4429 bp = bitpack_create (ob->main_stream);
4430 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4431 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4432 streamer_write_bitpack (&bp);
4433 break;
4434 }
4435
4436 count = vec_safe_length (jump_func->agg.items);
4437 streamer_write_uhwi (ob, count);
4438 if (count)
4439 {
4440 bp = bitpack_create (ob->main_stream);
4441 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4442 streamer_write_bitpack (&bp);
4443 }
4444
4445 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4446 {
4447 streamer_write_uhwi (ob, item->offset);
4448 stream_write_tree (ob, item->value, true);
4449 }
4450 }
4451
4452 /* Read in jump function JUMP_FUNC from IB. */
4453
4454 static void
4455 ipa_read_jump_function (struct lto_input_block *ib,
4456 struct ipa_jump_func *jump_func,
4457 struct cgraph_edge *cs,
4458 struct data_in *data_in)
4459 {
4460 enum jump_func_type jftype;
4461 enum tree_code operation;
4462 int i, count;
4463
4464 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4465 switch (jftype)
4466 {
4467 case IPA_JF_UNKNOWN:
4468 jump_func->type = IPA_JF_UNKNOWN;
4469 break;
4470 case IPA_JF_KNOWN_TYPE:
4471 {
4472 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4473 tree base_type = stream_read_tree (ib, data_in);
4474 tree component_type = stream_read_tree (ib, data_in);
4475
4476 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4477 break;
4478 }
4479 case IPA_JF_CONST:
4480 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4481 break;
4482 case IPA_JF_PASS_THROUGH:
4483 operation = (enum tree_code) streamer_read_uhwi (ib);
4484 if (operation == NOP_EXPR)
4485 {
4486 int formal_id = streamer_read_uhwi (ib);
4487 struct bitpack_d bp = streamer_read_bitpack (ib);
4488 bool agg_preserved = bp_unpack_value (&bp, 1);
4489 bool type_preserved = bp_unpack_value (&bp, 1);
4490 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4491 type_preserved);
4492 }
4493 else
4494 {
4495 tree operand = stream_read_tree (ib, data_in);
4496 int formal_id = streamer_read_uhwi (ib);
4497 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4498 operation);
4499 }
4500 break;
4501 case IPA_JF_ANCESTOR:
4502 {
4503 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4504 tree type = stream_read_tree (ib, data_in);
4505 int formal_id = streamer_read_uhwi (ib);
4506 struct bitpack_d bp = streamer_read_bitpack (ib);
4507 bool agg_preserved = bp_unpack_value (&bp, 1);
4508 bool type_preserved = bp_unpack_value (&bp, 1);
4509
4510 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4511 type_preserved);
4512 break;
4513 }
4514 }
4515
4516 count = streamer_read_uhwi (ib);
4517 vec_alloc (jump_func->agg.items, count);
4518 if (count)
4519 {
4520 struct bitpack_d bp = streamer_read_bitpack (ib);
4521 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4522 }
4523 for (i = 0; i < count; i++)
4524 {
4525 struct ipa_agg_jf_item item;
4526 item.offset = streamer_read_uhwi (ib);
4527 item.value = stream_read_tree (ib, data_in);
4528 jump_func->agg.items->quick_push (item);
4529 }
4530 }
4531
4532 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4533 relevant to indirect inlining to OB. */
4534
4535 static void
4536 ipa_write_indirect_edge_info (struct output_block *ob,
4537 struct cgraph_edge *cs)
4538 {
4539 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4540 struct bitpack_d bp;
4541
4542 streamer_write_hwi (ob, ii->param_index);
4543 streamer_write_hwi (ob, ii->offset);
4544 bp = bitpack_create (ob->main_stream);
4545 bp_pack_value (&bp, ii->polymorphic, 1);
4546 bp_pack_value (&bp, ii->agg_contents, 1);
4547 bp_pack_value (&bp, ii->member_ptr, 1);
4548 bp_pack_value (&bp, ii->by_ref, 1);
4549 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4550 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4551 streamer_write_bitpack (&bp);
4552
4553 if (ii->polymorphic)
4554 {
4555 streamer_write_hwi (ob, ii->otr_token);
4556 stream_write_tree (ob, ii->otr_type, true);
4557 stream_write_tree (ob, ii->outer_type, true);
4558 }
4559 }
4560
4561 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4562 relevant to indirect inlining from IB. */
4563
4564 static void
4565 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4566 struct data_in *data_in ATTRIBUTE_UNUSED,
4567 struct cgraph_edge *cs)
4568 {
4569 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4570 struct bitpack_d bp;
4571
4572 ii->param_index = (int) streamer_read_hwi (ib);
4573 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4574 bp = streamer_read_bitpack (ib);
4575 ii->polymorphic = bp_unpack_value (&bp, 1);
4576 ii->agg_contents = bp_unpack_value (&bp, 1);
4577 ii->member_ptr = bp_unpack_value (&bp, 1);
4578 ii->by_ref = bp_unpack_value (&bp, 1);
4579 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4580 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4581 if (ii->polymorphic)
4582 {
4583 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4584 ii->otr_type = stream_read_tree (ib, data_in);
4585 ii->outer_type = stream_read_tree (ib, data_in);
4586 }
4587 }
4588
4589 /* Stream out NODE info to OB. */
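/* The layout matches ipa_read_node_info: a reference to NODE in the symtab
   encoder, the parameter count, per-parameter move costs, a bitpack of the
   used flags, per-parameter controlled-use counts, and then, for each
   outgoing direct and indirect call, the argument count followed by the
   jump functions (indirect edges additionally stream their
   cgraph_indirect_call_info).  */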
4590
4591 static void
4592 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4593 {
4594 int node_ref;
4595 lto_symtab_encoder_t encoder;
4596 struct ipa_node_params *info = IPA_NODE_REF (node);
4597 int j;
4598 struct cgraph_edge *e;
4599 struct bitpack_d bp;
4600
4601 encoder = ob->decl_state->symtab_node_encoder;
4602 node_ref = lto_symtab_encoder_encode (encoder, node);
4603 streamer_write_uhwi (ob, node_ref);
4604
4605 streamer_write_uhwi (ob, ipa_get_param_count (info));
4606 for (j = 0; j < ipa_get_param_count (info); j++)
4607 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4608 bp = bitpack_create (ob->main_stream);
4609 gcc_assert (info->analysis_done
4610 || ipa_get_param_count (info) == 0);
4611 gcc_assert (!info->node_enqueued);
4612 gcc_assert (!info->ipcp_orig_node);
4613 for (j = 0; j < ipa_get_param_count (info); j++)
4614 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4615 streamer_write_bitpack (&bp);
4616 for (j = 0; j < ipa_get_param_count (info); j++)
4617 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4618 for (e = node->callees; e; e = e->next_callee)
4619 {
4620 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4621
4622 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4623 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4624 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4625 }
4626 for (e = node->indirect_calls; e; e = e->next_callee)
4627 {
4628 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4629
4630 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4631 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4632 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4633 ipa_write_indirect_edge_info (ob, e);
4634 }
4635 }
4636
4637 /* Stream in NODE info from IB. */
4638
4639 static void
4640 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4641 struct data_in *data_in)
4642 {
4643 struct ipa_node_params *info = IPA_NODE_REF (node);
4644 int k;
4645 struct cgraph_edge *e;
4646 struct bitpack_d bp;
4647
4648 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4649
4650 for (k = 0; k < ipa_get_param_count (info); k++)
4651 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4652
4653 bp = streamer_read_bitpack (ib);
4654 if (ipa_get_param_count (info) != 0)
4655 info->analysis_done = true;
4656 info->node_enqueued = false;
4657 for (k = 0; k < ipa_get_param_count (info); k++)
4658 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4659 for (k = 0; k < ipa_get_param_count (info); k++)
4660 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4661 for (e = node->callees; e; e = e->next_callee)
4662 {
4663 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4664 int count = streamer_read_uhwi (ib);
4665
4666 if (!count)
4667 continue;
4668 vec_safe_grow_cleared (args->jump_functions, count);
4669
4670 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4671 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4672 data_in);
4673 }
4674 for (e = node->indirect_calls; e; e = e->next_callee)
4675 {
4676 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4677 int count = streamer_read_uhwi (ib);
4678
4679 if (count)
4680 {
4681 vec_safe_grow_cleared (args->jump_functions, count);
4682 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4683 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4684 data_in);
4685 }
4686 ipa_read_indirect_edge_info (ib, data_in, e);
4687 }
4688 }
4689
4690 /* Write jump functions for all analyzed nodes in the current partition.  */
4691
4692 void
4693 ipa_prop_write_jump_functions (void)
4694 {
4695 struct cgraph_node *node;
4696 struct output_block *ob;
4697 unsigned int count = 0;
4698 lto_symtab_encoder_iterator lsei;
4699 lto_symtab_encoder_t encoder;
4700
4702 if (!ipa_node_params_vector.exists ())
4703 return;
4704
4705 ob = create_output_block (LTO_section_jump_functions);
4706 encoder = ob->decl_state->symtab_node_encoder;
4707 ob->cgraph_node = NULL;
4708 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4709 lsei_next_function_in_partition (&lsei))
4710 {
4711 node = lsei_cgraph_node (lsei);
4712 if (cgraph_function_with_gimple_body_p (node)
4713 && IPA_NODE_REF (node) != NULL)
4714 count++;
4715 }
4716
4717 streamer_write_uhwi (ob, count);
4718
4719 /* Process all of the functions. */
4720 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4721 lsei_next_function_in_partition (&lsei))
4722 {
4723 node = lsei_cgraph_node (lsei);
4724 if (cgraph_function_with_gimple_body_p (node)
4725 && IPA_NODE_REF (node) != NULL)
4726 ipa_write_node_info (ob, node);
4727 }
4728 streamer_write_char_stream (ob->main_stream, 0);
4729 produce_asm (ob, NULL);
4730 destroy_output_block (ob);
4731 }
4732
4733 /* Read section in file FILE_DATA of length LEN with data DATA. */
4734
4735 static void
4736 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4737 size_t len)
4738 {
4739 const struct lto_function_header *header =
4740 (const struct lto_function_header *) data;
4741 const int cfg_offset = sizeof (struct lto_function_header);
4742 const int main_offset = cfg_offset + header->cfg_size;
4743 const int string_offset = main_offset + header->main_size;
4744 struct data_in *data_in;
4745 struct lto_input_block ib_main;
4746 unsigned int i;
4747 unsigned int count;
4748
4749 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4750 header->main_size);
4751
4752 data_in =
4753 lto_data_in_create (file_data, (const char *) data + string_offset,
4754 header->string_size, vNULL);
4755 count = streamer_read_uhwi (&ib_main);
4756
4757 for (i = 0; i < count; i++)
4758 {
4759 unsigned int index;
4760 struct cgraph_node *node;
4761 lto_symtab_encoder_t encoder;
4762
4763 index = streamer_read_uhwi (&ib_main);
4764 encoder = file_data->symtab_node_encoder;
4765 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4766 gcc_assert (node->definition);
4767 ipa_read_node_info (&ib_main, node, data_in);
4768 }
4769 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4770 len);
4771 lto_data_in_delete (data_in);
4772 }
4773
4774 /* Read ipcp jump functions. */
4775
4776 void
4777 ipa_prop_read_jump_functions (void)
4778 {
4779 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4780 struct lto_file_decl_data *file_data;
4781 unsigned int j = 0;
4782
4783 ipa_check_create_node_params ();
4784 ipa_check_create_edge_args ();
4785 ipa_register_cgraph_hooks ();
4786
4787 while ((file_data = file_data_vec[j++]))
4788 {
4789 size_t len;
4790 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4791
4792 if (data)
4793 ipa_prop_read_section (file_data, data, len);
4794 }
4795 }
4796
4797 /* After merging units, we can get a mismatch in argument counts.
4798 Decl merging might also have rendered parameter lists obsolete.
4799 Also compute called_with_variable_arg info.  */
4800
4801 void
4802 ipa_update_after_lto_read (void)
4803 {
4804 ipa_check_create_node_params ();
4805 ipa_check_create_edge_args ();
4806 }
4807
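/* Stream out the aggregate value replacement chain for NODE to OB.  */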
4808 void
4809 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4810 {
4811 int node_ref;
4812 unsigned int count = 0;
4813 lto_symtab_encoder_t encoder;
4814 struct ipa_agg_replacement_value *aggvals, *av;
4815
4816 aggvals = ipa_get_agg_replacements_for_node (node);
4817 encoder = ob->decl_state->symtab_node_encoder;
4818 node_ref = lto_symtab_encoder_encode (encoder, node);
4819 streamer_write_uhwi (ob, node_ref);
4820
4821 for (av = aggvals; av; av = av->next)
4822 count++;
4823 streamer_write_uhwi (ob, count);
4824
4825 for (av = aggvals; av; av = av->next)
4826 {
4827 struct bitpack_d bp;
4828
4829 streamer_write_uhwi (ob, av->offset);
4830 streamer_write_uhwi (ob, av->index);
4831 stream_write_tree (ob, av->value, true);
4832
4833 bp = bitpack_create (ob->main_stream);
4834 bp_pack_value (&bp, av->by_ref, 1);
4835 streamer_write_bitpack (&bp);
4836 }
4837 }
4838
4839 /* Stream in the aggregate value replacement chain for NODE from IB. */
4840
4841 static void
4842 read_agg_replacement_chain (struct lto_input_block *ib,
4843 struct cgraph_node *node,
4844 struct data_in *data_in)
4845 {
4846 struct ipa_agg_replacement_value *aggvals = NULL;
4847 unsigned int count, i;
4848
4849 count = streamer_read_uhwi (ib);
4850 for (i = 0; i < count; i++)
4851 {
4852 struct ipa_agg_replacement_value *av;
4853 struct bitpack_d bp;
4854
4855 av = ggc_alloc<ipa_agg_replacement_value> ();
4856 av->offset = streamer_read_uhwi (ib);
4857 av->index = streamer_read_uhwi (ib);
4858 av->value = stream_read_tree (ib, data_in);
4859 bp = streamer_read_bitpack (ib);
4860 av->by_ref = bp_unpack_value (&bp, 1);
4861 av->next = aggvals;
4862 aggvals = av;
4863 }
4864 ipa_set_node_agg_value_chain (node, aggvals);
4865 }
4866
4867 /* Write all aggregate replacements for nodes in the set.  */
4868
4869 void
4870 ipa_prop_write_all_agg_replacement (void)
4871 {
4872 struct cgraph_node *node;
4873 struct output_block *ob;
4874 unsigned int count = 0;
4875 lto_symtab_encoder_iterator lsei;
4876 lto_symtab_encoder_t encoder;
4877
4878 if (!ipa_node_agg_replacements)
4879 return;
4880
4881 ob = create_output_block (LTO_section_ipcp_transform);
4882 encoder = ob->decl_state->symtab_node_encoder;
4883 ob->cgraph_node = NULL;
4884 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4885 lsei_next_function_in_partition (&lsei))
4886 {
4887 node = lsei_cgraph_node (lsei);
4888 if (cgraph_function_with_gimple_body_p (node)
4889 && ipa_get_agg_replacements_for_node (node) != NULL)
4890 count++;
4891 }
4892
4893 streamer_write_uhwi (ob, count);
4894
4895 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4896 lsei_next_function_in_partition (&lsei))
4897 {
4898 node = lsei_cgraph_node (lsei);
4899 if (cgraph_function_with_gimple_body_p (node)
4900 && ipa_get_agg_replacements_for_node (node) != NULL)
4901 write_agg_replacement_chain (ob, node);
4902 }
4903 streamer_write_char_stream (ob->main_stream, 0);
4904 produce_asm (ob, NULL);
4905 destroy_output_block (ob);
4906 }
4907
4908 /* Read replacements section in file FILE_DATA of length LEN with data
4909 DATA. */
4910
4911 static void
4912 read_replacements_section (struct lto_file_decl_data *file_data,
4913 const char *data,
4914 size_t len)
4915 {
4916 const struct lto_function_header *header =
4917 (const struct lto_function_header *) data;
4918 const int cfg_offset = sizeof (struct lto_function_header);
4919 const int main_offset = cfg_offset + header->cfg_size;
4920 const int string_offset = main_offset + header->main_size;
4921 struct data_in *data_in;
4922 struct lto_input_block ib_main;
4923 unsigned int i;
4924 unsigned int count;
4925
4926 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4927 header->main_size);
4928
4929 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4930 header->string_size, vNULL);
4931 count = streamer_read_uhwi (&ib_main);
4932
4933 for (i = 0; i < count; i++)
4934 {
4935 unsigned int index;
4936 struct cgraph_node *node;
4937 lto_symtab_encoder_t encoder;
4938
4939 index = streamer_read_uhwi (&ib_main);
4940 encoder = file_data->symtab_node_encoder;
4941 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4942 gcc_assert (node->definition);
4943 read_agg_replacement_chain (&ib_main, node, data_in);
4944 }
4945 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4946 len);
4947 lto_data_in_delete (data_in);
4948 }
4949
4950 /* Read IPA-CP aggregate replacements. */
4951
4952 void
4953 ipa_prop_read_all_agg_replacement (void)
4954 {
4955 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4956 struct lto_file_decl_data *file_data;
4957 unsigned int j = 0;
4958
4959 while ((file_data = file_data_vec[j++]))
4960 {
4961 size_t len;
4962 const char *data = lto_get_section_data (file_data,
4963 LTO_section_ipcp_transform,
4964 NULL, &len);
4965 if (data)
4966 read_replacements_section (file_data, data, len);
4967 }
4968 }
4969
4970 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4971 NODE. */
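/* For example, if NODE skips original parameters 0 and 2, an AGGVAL entry
   with index 1 is remapped to index 0 and an entry with index 3 to
   index 1.  */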
4972
4973 static void
4974 adjust_agg_replacement_values (struct cgraph_node *node,
4975 struct ipa_agg_replacement_value *aggval)
4976 {
4977 struct ipa_agg_replacement_value *v;
4978 int i, c = 0, d = 0, *adj;
4979
4980 if (!node->clone.combined_args_to_skip)
4981 return;
4982
4983 for (v = aggval; v; v = v->next)
4984 {
4985 gcc_assert (v->index >= 0);
4986 if (c < v->index)
4987 c = v->index;
4988 }
4989 c++;
4990
4991 adj = XALLOCAVEC (int, c);
4992 for (i = 0; i < c; i++)
4993 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4994 {
4995 adj[i] = -1;
4996 d++;
4997 }
4998 else
4999 adj[i] = i - d;
5000
5001 for (v = aggval; v; v = v->next)
5002 v->index = adj[v->index];
5003 }
5004
5005 /* Dominator walker driving the ipcp modification phase. */
5006
5007 class ipcp_modif_dom_walker : public dom_walker
5008 {
5009 public:
5010 ipcp_modif_dom_walker (struct func_body_info *fbi,
5011 vec<ipa_param_descriptor> descs,
5012 struct ipa_agg_replacement_value *av,
5013 bool *sc, bool *cc)
5014 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5015 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5016
5017 virtual void before_dom_children (basic_block);
5018
5019 private:
5020 struct func_body_info *m_fbi;
5021 vec<ipa_param_descriptor> m_descriptors;
5022 struct ipa_agg_replacement_value *m_aggval;
5023 bool *m_something_changed, *m_cfg_changed;
5024 };
5025
5026 void
5027 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5028 {
5029 gimple_stmt_iterator gsi;
5030 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5031 {
5032 struct ipa_agg_replacement_value *v;
5033 gimple stmt = gsi_stmt (gsi);
5034 tree rhs, val, t;
5035 HOST_WIDE_INT offset, size;
5036 int index;
5037 bool by_ref, vce;
5038
5039 if (!gimple_assign_load_p (stmt))
5040 continue;
5041 rhs = gimple_assign_rhs1 (stmt);
5042 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5043 continue;
5044
5045 vce = false;
5046 t = rhs;
5047 while (handled_component_p (t))
5048 {
5049 /* V_C_E can do things like convert an array of integers to one
5050 bigger integer and similar things we do not handle below. */
5051 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5052 {
5053 vce = true;
5054 break;
5055 }
5056 t = TREE_OPERAND (t, 0);
5057 }
5058 if (vce)
5059 continue;
5060
5061 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5062 &offset, &size, &by_ref))
5063 continue;
5064 for (v = m_aggval; v; v = v->next)
5065 if (v->index == index
5066 && v->offset == offset)
5067 break;
5068 if (!v
5069 || v->by_ref != by_ref
5070 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5071 continue;
5072
5073 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5074 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5075 {
5076 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5077 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5078 else if (TYPE_SIZE (TREE_TYPE (rhs))
5079 == TYPE_SIZE (TREE_TYPE (v->value)))
5080 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5081 else
5082 {
5083 if (dump_file)
5084 {
5085 fprintf (dump_file, " const ");
5086 print_generic_expr (dump_file, v->value, 0);
5087 fprintf (dump_file, " can't be converted to type of ");
5088 print_generic_expr (dump_file, rhs, 0);
5089 fprintf (dump_file, "\n");
5090 }
5091 continue;
5092 }
5093 }
5094 else
5095 val = v->value;
5096
5097 if (dump_file && (dump_flags & TDF_DETAILS))
5098 {
5099 fprintf (dump_file, "Modifying stmt:\n ");
5100 print_gimple_stmt (dump_file, stmt, 0, 0);
5101 }
5102 gimple_assign_set_rhs_from_tree (&gsi, val);
5103 update_stmt (stmt);
5104
5105 if (dump_file && (dump_flags & TDF_DETAILS))
5106 {
5107 fprintf (dump_file, "into:\n ");
5108 print_gimple_stmt (dump_file, stmt, 0, 0);
5109 fprintf (dump_file, "\n");
5110 }
5111
5112 *m_something_changed = true;
5113 if (maybe_clean_eh_stmt (stmt)
5114 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5115 *m_cfg_changed = true;
5116 }
5117
5118 }
5119
5120 /* IPCP transformation phase doing propagation of aggregate values. */
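/* For instance (a hedged sketch): if IPA-CP recorded that the aggregate
   reachable through parameter S of this clone always holds 7 at a given
   offset, a load x_1 = S->f from that offset is rewritten into x_1 = 7
   by the dominator walk above.  */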
5121
5122 unsigned int
5123 ipcp_transform_function (struct cgraph_node *node)
5124 {
5125 vec<ipa_param_descriptor> descriptors = vNULL;
5126 struct func_body_info fbi;
5127 struct ipa_agg_replacement_value *aggval;
5128 int param_count;
5129 bool cfg_changed = false, something_changed = false;
5130
5131 gcc_checking_assert (cfun);
5132 gcc_checking_assert (current_function_decl);
5133
5134 if (dump_file)
5135 fprintf (dump_file, "Modification phase of node %s/%i\n",
5136 node->name (), node->order);
5137
5138 aggval = ipa_get_agg_replacements_for_node (node);
5139 if (!aggval)
5140 return 0;
5141 param_count = count_formal_params (node->decl);
5142 if (param_count == 0)
5143 return 0;
5144 adjust_agg_replacement_values (node, aggval);
5145 if (dump_file)
5146 ipa_dump_agg_replacement_values (dump_file, aggval);
5147
5148 fbi.node = node;
5149 fbi.info = NULL;
5150 fbi.bb_infos = vNULL;
5151 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5152 fbi.param_count = param_count;
5153 fbi.aa_walked = 0;
5154
5155 descriptors.safe_grow_cleared (param_count);
5156 ipa_populate_param_decls (node, descriptors);
5157 calculate_dominance_info (CDI_DOMINATORS);
5158 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5159 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5160
5161 int i;
5162 struct ipa_bb_info *bi;
5163 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5164 free_ipa_bb_info (bi);
5165 fbi.bb_infos.release ();
5166 free_dominance_info (CDI_DOMINATORS);
5167 (*ipa_node_agg_replacements)[node->uid] = NULL;
5168 descriptors.release ();
5169
5170 if (!something_changed)
5171 return 0;
5172 else if (cfg_changed)
5173 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5174 else
5175 return TODO_update_ssa_only_virtuals;
5176 }