Fix for devirtualization dump functions
gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60 #include "stringpool.h"
61 #include "tree-ssanames.h"
62 #include "dbgcnt.h"
63 #include "domwalk.h"
64 #include "builtins.h"
65
66 /* Intermediate information that we get from alias analysis about a particular
67 parameter in a particular basic_block. When a parameter or the memory it
68 references is marked modified, we use that information in all dominated
69 blocks without consulting the alias analysis oracle. */
70
71 struct param_aa_status
72 {
73 /* Set when this structure contains meaningful information. If not, the
74 structure describing a dominating BB should be used instead. */
75 bool valid;
76
77 /* Whether we have seen something which might have modified the data in
78 question. PARM is for the parameter itself, REF is for data it points to
79 but using the alias type of individual accesses and PT is the same thing
80 but for computing aggregate pass-through functions using a very inclusive
81 ao_ref. */
82 bool parm_modified, ref_modified, pt_modified;
83 };
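
/* For example (illustrative), if block A is known to modify a parameter and
block B is dominated by A, B can reuse A's recorded answer via
find_dominating_aa_status below instead of walking the virtual SSA web
again. */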
84
85 /* Information related to a given BB that is used only when looking at a function
86 body. */
87
88 struct ipa_bb_info
89 {
90 /* Call graph edges going out of this BB. */
91 vec<cgraph_edge_p> cg_edges;
92 /* Alias analysis statuses of each formal parameter at this bb. */
93 vec<param_aa_status> param_aa_statuses;
94 };
95
96 /* Structure with global information that is only used when looking at a function
97 body. */
98
99 struct func_body_info
100 {
101 /* The node that is being analyzed. */
102 cgraph_node *node;
103
104 /* Its info. */
105 struct ipa_node_params *info;
106
107 /* Information about individual BBs. */
108 vec<ipa_bb_info> bb_infos;
109
110 /* Number of parameters. */
111 int param_count;
112
113 /* Number of statements already walked when analyzing this function. */
114 unsigned int aa_walked;
115 };
116
117 /* Vector where the parameter infos are actually stored. */
118 vec<ipa_node_params> ipa_node_params_vector;
119 /* Vector of known aggregate values in cloned nodes. */
120 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
121 /* Vector where the edge argument infos are actually stored. */
122 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
123
124 /* Holders of ipa cgraph hooks: */
125 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
126 static struct cgraph_node_hook_list *node_removal_hook_holder;
127 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
128 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
129 static struct cgraph_node_hook_list *function_insertion_hook_holder;
130
131 /* Description of a reference to an IPA constant. */
132 struct ipa_cst_ref_desc
133 {
134 /* Edge that corresponds to the statement which took the reference. */
135 struct cgraph_edge *cs;
136 /* Linked list of duplicates created when call graph edges are cloned. */
137 struct ipa_cst_ref_desc *next_duplicate;
138 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
139 is out of control. */
140 int refcount;
141 };
142
143 /* Allocation pool for reference descriptions. */
144
145 static alloc_pool ipa_refdesc_pool;
146
147 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
148 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
149
150 static bool
151 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
152 {
153 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
154 struct cl_optimization *os;
155
156 if (!fs_opts)
157 return false;
158 os = TREE_OPTIMIZATION (fs_opts);
159 return !os->x_optimize || !os->x_flag_ipa_cp;
160 }
161
162 /* Return index of the formal whose tree is PTREE in the function described
163 by DESCRIPTORS. */
164
165 static int
166 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
167 {
168 int i, count;
169
170 count = descriptors.length ();
171 for (i = 0; i < count; i++)
172 if (descriptors[i].decl == ptree)
173 return i;
174
175 return -1;
176 }
177
178 /* Return index of the formal whose tree is PTREE in the function that
179 corresponds to INFO. */
180
181 int
182 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
183 {
184 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
185 }
186
187 /* Populate the decl fields in parameter DESCRIPTORS that correspond to
188 NODE. */
189
190 static void
191 ipa_populate_param_decls (struct cgraph_node *node,
192 vec<ipa_param_descriptor> &descriptors)
193 {
194 tree fndecl;
195 tree fnargs;
196 tree parm;
197 int param_num;
198
199 fndecl = node->decl;
200 gcc_assert (gimple_has_body_p (fndecl));
201 fnargs = DECL_ARGUMENTS (fndecl);
202 param_num = 0;
203 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
204 {
205 descriptors[param_num].decl = parm;
206 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
207 param_num++;
208 }
209 }
210
211 /* Return how many formal parameters FNDECL has. */
212
213 static inline int
214 count_formal_params (tree fndecl)
215 {
216 tree parm;
217 int count = 0;
218 gcc_assert (gimple_has_body_p (fndecl));
219
220 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
221 count++;
222
223 return count;
224 }
225
226 /* Dump a human-readable representation of the Ith formal parameter of the
227 function corresponding to INFO to FILE. */
229
230 void
231 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
232 {
233 fprintf (file, "param #%i", i);
234 if (info->descriptors[i].decl)
235 {
236 fprintf (file, " ");
237 print_generic_expr (file, info->descriptors[i].decl, 0);
238 }
239 }
240
241 /* Initialize the ipa_node_params structure associated with NODE
242 to hold PARAM_COUNT parameters. */
243
244 void
245 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
246 {
247 struct ipa_node_params *info = IPA_NODE_REF (node);
248
249 if (!info->descriptors.exists () && param_count)
250 info->descriptors.safe_grow_cleared (param_count);
251 }
252
253 /* Initialize the ipa_node_params structure associated with NODE by counting
254 the function parameters, creating the descriptors and populating their
255 param_decls. */
256
257 void
258 ipa_initialize_node_params (struct cgraph_node *node)
259 {
260 struct ipa_node_params *info = IPA_NODE_REF (node);
261
262 if (!info->descriptors.exists ())
263 {
264 ipa_alloc_node_params (node, count_formal_params (node->decl));
265 ipa_populate_param_decls (node, info->descriptors);
266 }
267 }
268
269 /* Print the jump functions associated with call graph edge CS to file F. */
270
271 static void
272 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
273 {
274 int i, count;
275
276 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
277 for (i = 0; i < count; i++)
278 {
279 struct ipa_jump_func *jump_func;
280 enum jump_func_type type;
281
282 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
283 type = jump_func->type;
284
285 fprintf (f, " param %d: ", i);
286 if (type == IPA_JF_UNKNOWN)
287 fprintf (f, "UNKNOWN\n");
288 else if (type == IPA_JF_KNOWN_TYPE)
289 {
290 fprintf (f, "KNOWN TYPE: base ");
291 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
292 fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
293 jump_func->value.known_type.offset);
294 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
295 fprintf (f, "\n");
296 }
297 else if (type == IPA_JF_CONST)
298 {
299 tree val = jump_func->value.constant.value;
300 fprintf (f, "CONST: ");
301 print_generic_expr (f, val, 0);
302 if (TREE_CODE (val) == ADDR_EXPR
303 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
304 {
305 fprintf (f, " -> ");
306 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
307 0);
308 }
309 fprintf (f, "\n");
310 }
311 else if (type == IPA_JF_PASS_THROUGH)
312 {
313 fprintf (f, "PASS THROUGH: ");
314 fprintf (f, "%d, op %s",
315 jump_func->value.pass_through.formal_id,
316 get_tree_code_name (jump_func->value.pass_through.operation));
317 if (jump_func->value.pass_through.operation != NOP_EXPR)
318 {
319 fprintf (f, " ");
320 print_generic_expr (f,
321 jump_func->value.pass_through.operand, 0);
322 }
323 if (jump_func->value.pass_through.agg_preserved)
324 fprintf (f, ", agg_preserved");
325 if (jump_func->value.pass_through.type_preserved)
326 fprintf (f, ", type_preserved");
327 fprintf (f, "\n");
328 }
329 else if (type == IPA_JF_ANCESTOR)
330 {
331 fprintf (f, "ANCESTOR: ");
332 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
333 jump_func->value.ancestor.formal_id,
334 jump_func->value.ancestor.offset);
335 print_generic_expr (f, jump_func->value.ancestor.type, 0);
336 if (jump_func->value.ancestor.agg_preserved)
337 fprintf (f, ", agg_preserved");
338 if (jump_func->value.ancestor.type_preserved)
339 fprintf (f, ", type_preserved");
340 fprintf (f, "\n");
341 }
342
343 if (jump_func->agg.items)
344 {
345 struct ipa_agg_jf_item *item;
346 int j;
347
348 fprintf (f, " Aggregate passed by %s:\n",
349 jump_func->agg.by_ref ? "reference" : "value");
350 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
351 {
352 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
353 item->offset);
354 if (TYPE_P (item->value))
355 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
356 tree_to_uhwi (TYPE_SIZE (item->value)));
357 else
358 {
359 fprintf (f, "cst: ");
360 print_generic_expr (f, item->value, 0);
361 }
362 fprintf (f, "\n");
363 }
364 }
365 }
366 }
367
368
369 /* Print the jump functions of all arguments on all call graph edges going from
370 NODE to file F. */
371
372 void
373 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
374 {
375 struct cgraph_edge *cs;
376
377 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
378 node->order);
379 for (cs = node->callees; cs; cs = cs->next_callee)
380 {
381 if (!ipa_edge_args_info_available_for_edge_p (cs))
382 continue;
383
384 fprintf (f, " callsite %s/%i -> %s/%i : \n",
385 xstrdup (node->name ()), node->order,
386 xstrdup (cs->callee->name ()),
387 cs->callee->order);
388 ipa_print_node_jump_functions_for_edge (f, cs);
389 }
390
391 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
392 {
393 struct cgraph_indirect_call_info *ii;
394 if (!ipa_edge_args_info_available_for_edge_p (cs))
395 continue;
396
397 ii = cs->indirect_info;
398 if (ii->agg_contents)
399 fprintf (f, " indirect %s callsite, calling param %i, "
400 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
401 ii->member_ptr ? "member ptr" : "aggregate",
402 ii->param_index, ii->offset,
403 ii->by_ref ? "by reference" : "by value");
404 else
405 fprintf (f, " indirect %s callsite, calling param %i, "
406 "offset " HOST_WIDE_INT_PRINT_DEC,
407 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
408 ii->offset);
409
410 if (cs->call_stmt)
411 {
412 fprintf (f, ", for stmt ");
413 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
414 }
415 else
416 fprintf (f, "\n");
417 ipa_print_node_jump_functions_for_edge (f, cs);
418 }
419 }
420
421 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
422
423 void
424 ipa_print_all_jump_functions (FILE *f)
425 {
426 struct cgraph_node *node;
427
428 fprintf (f, "\nJump functions:\n");
429 FOR_EACH_FUNCTION (node)
430 {
431 ipa_print_node_jump_functions (f, node);
432 }
433 }
434
435 /* Set JFUNC to be a known type jump function. */
436
437 static void
438 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
439 tree base_type, tree component_type)
440 {
441 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
442 && TYPE_BINFO (component_type));
443 if (!flag_devirtualize)
444 return;
445 gcc_assert (BINFO_VTABLE (TYPE_BINFO (component_type)));
446 jfunc->type = IPA_JF_KNOWN_TYPE;
447 jfunc->value.known_type.offset = offset;
448 jfunc->value.known_type.base_type = base_type;
449 jfunc->value.known_type.component_type = component_type;
450 gcc_assert (component_type);
451 }
452
453 /* Set DST to be a copy of the jump function SRC (to be used by jump function
454 combination code). The two functions will share their rdesc. */
455
456 static void
457 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
458 struct ipa_jump_func *src)
459
460 {
461 gcc_checking_assert (src->type == IPA_JF_CONST);
462 dst->type = IPA_JF_CONST;
463 dst->value.constant = src->value.constant;
464 }
465
466 /* Set JFUNC to be a constant jump function. */
467
468 static void
469 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
470 struct cgraph_edge *cs)
471 {
472 constant = unshare_expr (constant);
473 if (constant && EXPR_P (constant))
474 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
475 jfunc->type = IPA_JF_CONST;
476 jfunc->value.constant.value = unshare_expr_without_location (constant);
477
478 if (TREE_CODE (constant) == ADDR_EXPR
479 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
480 {
481 struct ipa_cst_ref_desc *rdesc;
482 if (!ipa_refdesc_pool)
483 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
484 sizeof (struct ipa_cst_ref_desc), 32);
485
486 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
487 rdesc->cs = cs;
488 rdesc->next_duplicate = NULL;
489 rdesc->refcount = 1;
490 jfunc->value.constant.rdesc = rdesc;
491 }
492 else
493 jfunc->value.constant.rdesc = NULL;
494 }
495
496 /* Set JFUNC to be a simple pass-through jump function. */
497 static void
498 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
499 bool agg_preserved, bool type_preserved)
500 {
501 jfunc->type = IPA_JF_PASS_THROUGH;
502 jfunc->value.pass_through.operand = NULL_TREE;
503 jfunc->value.pass_through.formal_id = formal_id;
504 jfunc->value.pass_through.operation = NOP_EXPR;
505 jfunc->value.pass_through.agg_preserved = agg_preserved;
506 jfunc->value.pass_through.type_preserved = type_preserved;
507 }
508
509 /* Set JFUNC to be an arithmetic pass through jump function. */
510
511 static void
512 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
513 tree operand, enum tree_code operation)
514 {
515 jfunc->type = IPA_JF_PASS_THROUGH;
516 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
517 jfunc->value.pass_through.formal_id = formal_id;
518 jfunc->value.pass_through.operation = operation;
519 jfunc->value.pass_through.agg_preserved = false;
520 jfunc->value.pass_through.type_preserved = false;
521 }
522
523 /* Set JFUNC to be an ancestor jump function. */
524
525 static void
526 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
527 tree type, int formal_id, bool agg_preserved,
528 bool type_preserved)
529 {
530 if (!flag_devirtualize)
531 type_preserved = false;
532 gcc_assert (!type_preserved
533 || (TREE_CODE (type) == RECORD_TYPE
534 && TYPE_BINFO (type)
535 && BINFO_VTABLE (TYPE_BINFO (type))));
536 jfunc->type = IPA_JF_ANCESTOR;
537 jfunc->value.ancestor.formal_id = formal_id;
538 jfunc->value.ancestor.offset = offset;
539 jfunc->value.ancestor.type = type_preserved ? type : NULL;
540 jfunc->value.ancestor.agg_preserved = agg_preserved;
541 jfunc->value.ancestor.type_preserved = type_preserved;
542 }
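
/* A minimal sketch (illustrative, not part of the original file) of how the
setters above map to source-level situations; the helper name is
hypothetical. It assumes JF points to three zero-initialized jump
functions, that the formal being passed is an integer in the first two
cases and a pointer to a structure in the third, and that the callee
receives it unchanged, incremented by four and upcast to a base at offset
zero, respectively. */

static void ATTRIBUTE_UNUSED
example_fill_jump_functions (struct ipa_jump_func *jf)
{
  /* bar (a): value and aggregate contents preserved, type not tracked. */
  ipa_set_jf_simple_pass_through (&jf[0], 0, true, false);
  /* bar (a + 4): arithmetic pass-through with a constant second operand. */
  ipa_set_jf_arith_pass_through (&jf[1], 0,
				 build_int_cst (integer_type_node, 4),
				 PLUS_EXPR);
  /* A::bar (&b->base): ancestor at bit offset 0, dynamic type not
     preserved. */
  ipa_set_ancestor_jf (&jf[2], 0, NULL_TREE, 0, true, false);
}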
543
544 /* Extract the actual BINFO being described by JFUNC which must be a known type
545 jump function. */
546
547 tree
548 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
549 {
550 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
551 if (!base_binfo)
552 return NULL_TREE;
553 return get_binfo_at_offset (base_binfo,
554 jfunc->value.known_type.offset,
555 jfunc->value.known_type.component_type);
556 }
557
558 /* Get IPA BB information about the given BB. FBI is the context of the analysis
559 of this function body. */
560
561 static struct ipa_bb_info *
562 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
563 {
564 gcc_checking_assert (fbi);
565 return &fbi->bb_infos[bb->index];
566 }
567
568 /* Structure to be passed in between detect_type_change and
569 check_stmt_for_type_change. */
570
571 struct type_change_info
572 {
573 /* Offset into the object where there is the virtual method pointer we are
574 looking for. */
575 HOST_WIDE_INT offset;
576 /* The declaration or SSA_NAME pointer of the base that we are checking for
577 type change. */
578 tree object;
579 /* If we actually can tell the type that the object has changed to, it is
580 stored in this field. Otherwise it remains NULL_TREE. */
581 tree known_current_type;
582 /* Set to true if dynamic type change has been detected. */
583 bool type_maybe_changed;
584 /* Set to true if multiple types have been encountered. known_current_type
585 must be disregarded in that case. */
586 bool multiple_types_encountered;
587 };
588
589 /* Return true if STMT can modify a virtual method table pointer.
590
591 This function makes special assumptions about both constructors and
592 destructors which are all the functions that are allowed to alter the VMT
593 pointers. It assumes that destructors begin with assignment into all VMT
594 pointers and that constructors essentially look in the following way:
595
596 1) The very first thing they do is that they call constructors of ancestor
597 sub-objects that have them.
598
599 2) Then the VMT pointers of this and all its ancestors are set to new
600 values corresponding to the type associated with the constructor.
601
602 3) Only afterwards, other stuff such as constructors of member sub-objects
603 and the code written by the user is run. Only this may include calling
604 virtual functions, directly or indirectly.
605
606 There is no way to call a constructor of an ancestor sub-object in any
607 other way.
608
609 This means that we do not have to care whether constructors get the correct
610 type information because they will always change it (in fact, if we define
611 the type to be given by the VMT pointer, it is undefined).
612
613 The most important fact to derive from the above is that if, for some
614 statement in the section 3, we try to detect whether the dynamic type has
615 changed, we can safely ignore all calls as we examine the function body
616 backwards until we reach statements in section 2 because these calls cannot
617 be ancestor constructors or destructors (if the input is not bogus) and so
618 do not change the dynamic type (this holds true only for automatically
619 allocated objects but at the moment we devirtualize only these). We then
620 must detect that statements in section 2 change the dynamic type and can try
621 to derive the new type. That is enough and we can stop, we will never see
622 the calls into constructors of sub-objects in this code. Therefore we can
623 safely ignore all call statements that we traverse.
624 */
625
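/* For illustration (an assumed example, not taken from this file), given

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   the compiled body of B::B () first calls A::A () (section 1 above), then
   stores the address of B's virtual table into the VMT pointer (section 2),
   and only afterwards runs the user-written constructor code (section 3). */
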
626 static bool
627 stmt_may_be_vtbl_ptr_store (gimple stmt)
628 {
629 if (is_gimple_call (stmt))
630 return false;
631 /* TODO: Skip clobbers; doing so currently triggers a problem in PR60306. */
632 else if (is_gimple_assign (stmt))
633 {
634 tree lhs = gimple_assign_lhs (stmt);
635
636 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
637 {
638 if (flag_strict_aliasing
639 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
640 return false;
641
642 if (TREE_CODE (lhs) == COMPONENT_REF
643 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
644 return false;
645 /* In the future we might want to use get_base_ref_and_offset to find
646 if there is a field corresponding to the offset and if so, proceed
647 almost like if it was a component ref. */
648 }
649 }
650 return true;
651 }
652
653 /* If STMT can be proved to be an assignment to the virtual method table
654 pointer of the object described by TCI and the type associated with the
655 new table identified, return the type. Otherwise return NULL_TREE. */
656
657 static tree
658 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
659 {
660 HOST_WIDE_INT offset, size, max_size;
661 tree lhs, rhs, base, binfo;
662
663 if (!gimple_assign_single_p (stmt))
664 return NULL_TREE;
665
666 lhs = gimple_assign_lhs (stmt);
667 rhs = gimple_assign_rhs1 (stmt);
668 if (TREE_CODE (lhs) != COMPONENT_REF
669 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
670 return NULL_TREE;
671
672 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
673 if (offset != tci->offset
674 || size != POINTER_SIZE
675 || max_size != POINTER_SIZE)
676 return NULL_TREE;
677 if (TREE_CODE (base) == MEM_REF)
678 {
679 if (TREE_CODE (tci->object) != MEM_REF
680 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
681 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
682 TREE_OPERAND (base, 1)))
683 return NULL_TREE;
684 }
685 else if (tci->object != base)
686 return NULL_TREE;
687
688 binfo = vtable_pointer_value_to_binfo (rhs);
689
690 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
691 base of outer type. In this case we would need to either
692 work on binfos or translate it back to outer type and offset.
693 KNOWN_TYPE jump functions are not ready for that, yet. */
694 if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
695 return NULL;
696
697 return BINFO_TYPE (binfo);
698 }
699
700 /* Callback of walk_aliased_vdefs and a helper function for
701 detect_type_change to check whether a particular statement may modify
702 the virtual table pointer, and if possible also determine the new type of
703 the (sub-)object. It stores its result into DATA, which points to a
704 type_change_info structure. */
705
706 static bool
707 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
708 {
709 gimple stmt = SSA_NAME_DEF_STMT (vdef);
710 struct type_change_info *tci = (struct type_change_info *) data;
711
712 if (stmt_may_be_vtbl_ptr_store (stmt))
713 {
714 tree type;
715 type = extr_type_from_vtbl_ptr_store (stmt, tci);
716 if (tci->type_maybe_changed
717 && type != tci->known_current_type)
718 tci->multiple_types_encountered = true;
719 tci->known_current_type = type;
720 tci->type_maybe_changed = true;
721 return true;
722 }
723 else
724 return false;
725 }
726
727
728
729 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
730 callsite CALL) by looking for assignments to its virtual table pointer. If
731 it has, return true and fill in the jump function JFUNC with relevant type
732 information or set it to unknown. ARG is the object itself (not a pointer
733 to it, unless dereferenced). BASE is the base of the memory access as
734 returned by get_ref_base_and_extent, as is the offset. */
735
736 static bool
737 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
738 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
739 {
740 struct type_change_info tci;
741 ao_ref ao;
742
743 gcc_checking_assert (DECL_P (arg)
744 || TREE_CODE (arg) == MEM_REF
745 || handled_component_p (arg));
746 /* Const calls cannot call virtual methods through VMT and so type changes do
747 not matter. */
748 if (!flag_devirtualize || !gimple_vuse (call)
749 /* Be sure expected_type is polymorphic. */
750 || !comp_type
751 || TREE_CODE (comp_type) != RECORD_TYPE
752 || !TYPE_BINFO (comp_type)
753 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
754 return true;
755
756 /* C++ methods are not allowed to change THIS pointer unless they
757 are constructors or destructors. */
758 if (TREE_CODE (base) == MEM_REF
759 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
760 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
761 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
762 && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
763 && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
764 && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
765 && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
766 == DECL_ARGUMENTS (current_function_decl)))
767 return false;
768
769 ao_ref_init (&ao, arg);
770 ao.base = base;
771 ao.offset = offset;
772 ao.size = POINTER_SIZE;
773 ao.max_size = ao.size;
774
775 tci.offset = offset;
776 tci.object = get_base_address (arg);
777 tci.known_current_type = NULL_TREE;
778 tci.type_maybe_changed = false;
779 tci.multiple_types_encountered = false;
780
781 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
782 &tci, NULL);
783 if (!tci.type_maybe_changed)
784 return false;
785
786 if (!tci.known_current_type
787 || tci.multiple_types_encountered
788 || offset != 0)
789 jfunc->type = IPA_JF_UNKNOWN;
790 else
791 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
792
793 return true;
794 }
795
796 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
797 SSA name (its dereference will become the base and the offset is assumed to
798 be zero). */
799
800 static bool
801 detect_type_change_ssa (tree arg, tree comp_type,
802 gimple call, struct ipa_jump_func *jfunc)
803 {
804 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
805 if (!flag_devirtualize
806 || !POINTER_TYPE_P (TREE_TYPE (arg)))
807 return false;
808
809 arg = build2 (MEM_REF, ptr_type_node, arg,
810 build_int_cst (ptr_type_node, 0));
811
812 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
813 }
814
815 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
816 boolean variable pointed to by DATA. */
817
818 static bool
819 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
820 void *data)
821 {
822 bool *b = (bool *) data;
823 *b = true;
824 return true;
825 }
826
827 /* Return true if we have already walked so many statements in AA that we
828 should really just start giving up. */
829
830 static bool
831 aa_overwalked (struct func_body_info *fbi)
832 {
833 gcc_checking_assert (fbi);
834 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
835 }
836
837 /* Find the nearest valid aa status for parameter specified by INDEX that
838 dominates BB. */
839
840 static struct param_aa_status *
841 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
842 int index)
843 {
844 while (true)
845 {
846 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
847 if (!bb)
848 return NULL;
849 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
850 if (!bi->param_aa_statuses.is_empty ()
851 && bi->param_aa_statuses[index].valid)
852 return &bi->param_aa_statuses[index];
853 }
854 }
855
856 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
857 structures and/or initialize the result with a dominating description as
858 necessary. */
859
860 static struct param_aa_status *
861 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
862 int index)
863 {
864 gcc_checking_assert (fbi);
865 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
866 if (bi->param_aa_statuses.is_empty ())
867 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
868 struct param_aa_status *paa = &bi->param_aa_statuses[index];
869 if (!paa->valid)
870 {
871 gcc_checking_assert (!paa->parm_modified
872 && !paa->ref_modified
873 && !paa->pt_modified);
874 struct param_aa_status *dom_paa;
875 dom_paa = find_dominating_aa_status (fbi, bb, index);
876 if (dom_paa)
877 *paa = *dom_paa;
878 else
879 paa->valid = true;
880 }
881
882 return paa;
883 }
884
885 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
886 a value known not to be modified in this function before reaching the
887 statement STMT. FBI holds information about the function that we have
888 gathered so far but that does not survive the summary building stage. */
889
890 static bool
891 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
892 gimple stmt, tree parm_load)
893 {
894 struct param_aa_status *paa;
895 bool modified = false;
896 ao_ref refd;
897
898 /* FIXME: FBI can be NULL if we are being called from outside
899 ipa_node_analysis or ipcp_transform_function, which currently happens
900 during inlining analysis. It would be great to extend fbi's lifetime and
901 always have it. Currently, we are just not afraid of too much walking in
902 that case. */
903 if (fbi)
904 {
905 if (aa_overwalked (fbi))
906 return false;
907 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
908 if (paa->parm_modified)
909 return false;
910 }
911 else
912 paa = NULL;
913
914 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
915 ao_ref_init (&refd, parm_load);
916 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
917 &modified, NULL);
918 if (fbi)
919 fbi->aa_walked += walked;
920 if (paa && modified)
921 paa->parm_modified = true;
922 return !modified;
923 }
924
925 /* If STMT is an assignment that loads a value from a parameter declaration,
926 return the index of the parameter in ipa_node_params which has not been
927 modified. Otherwise return -1. */
928
929 static int
930 load_from_unmodified_param (struct func_body_info *fbi,
931 vec<ipa_param_descriptor> descriptors,
932 gimple stmt)
933 {
934 int index;
935 tree op1;
936
937 if (!gimple_assign_single_p (stmt))
938 return -1;
939
940 op1 = gimple_assign_rhs1 (stmt);
941 if (TREE_CODE (op1) != PARM_DECL)
942 return -1;
943
944 index = ipa_get_param_decl_index_1 (descriptors, op1);
945 if (index < 0
946 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
947 return -1;
948
949 return index;
950 }
951
952 /* Return true if memory reference REF (which must be a load through parameter
953 with INDEX) loads data that are known to be unmodified in this function
954 before reaching statement STMT. */
955
956 static bool
957 parm_ref_data_preserved_p (struct func_body_info *fbi,
958 int index, gimple stmt, tree ref)
959 {
960 struct param_aa_status *paa;
961 bool modified = false;
962 ao_ref refd;
963
964 /* FIXME: FBI can be NULL if we are being called from outside
965 ipa_node_analysis or ipcp_transform_function, which currently happens
966 during inlining analysis. It would be great to extend fbi's lifetime and
967 always have it. Currently, we are just not afraid of too much walking in
968 that case. */
969 if (fbi)
970 {
971 if (aa_overwalked (fbi))
972 return false;
973 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
974 if (paa->ref_modified)
975 return false;
976 }
977 else
978 paa = NULL;
979
980 gcc_checking_assert (gimple_vuse (stmt));
981 ao_ref_init (&refd, ref);
982 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
983 &modified, NULL);
984 if (fbi)
985 fbi->aa_walked += walked;
986 if (paa && modified)
987 paa->ref_modified = true;
988 return !modified;
989 }
990
991 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
992 is known to be unmodified in this function before reaching call statement
993 CALL into which it is passed. FBI describes the function body. */
994
995 static bool
996 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
997 gimple call, tree parm)
998 {
999 bool modified = false;
1000 ao_ref refd;
1001
1002 /* It's unnecessary to calculate anything about memory contents for a const
1003 function because it is not going to use it. But do not cache the result
1004 either. Also, no such calculations for non-pointers. */
1005 if (!gimple_vuse (call)
1006 || !POINTER_TYPE_P (TREE_TYPE (parm))
1007 || aa_overwalked (fbi))
1008 return false;
1009
1010 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1011 index);
1012 if (paa->pt_modified)
1013 return false;
1014
1015 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1016 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1017 &modified, NULL);
1018 fbi->aa_walked += walked;
1019 if (modified)
1020 paa->pt_modified = true;
1021 return !modified;
1022 }
1023
1024 /* Return true if we can prove that OP is a memory reference loading unmodified
1025 data from an aggregate passed as a parameter and if the aggregate is passed
1026 by reference, that the alias type of the load corresponds to the type of the
1027 formal parameter (so that we can rely on this type for TBAA in callers).
1028 DESCRIPTORS and FBI describe parameters of the current function (the latter
1029 can be NULL), STMT is the load statement. If the function returns true,
1030 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1031 within the aggregate and whether it is a load from a value passed by
1032 reference respectively. */
1033
1034 static bool
1035 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1036 vec<ipa_param_descriptor> descriptors,
1037 gimple stmt, tree op, int *index_p,
1038 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1039 bool *by_ref_p)
1040 {
1041 int index;
1042 HOST_WIDE_INT size, max_size;
1043 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1044
1045 if (max_size == -1 || max_size != size || *offset_p < 0)
1046 return false;
1047
1048 if (DECL_P (base))
1049 {
1050 int index = ipa_get_param_decl_index_1 (descriptors, base);
1051 if (index >= 0
1052 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1053 {
1054 *index_p = index;
1055 *by_ref_p = false;
1056 if (size_p)
1057 *size_p = size;
1058 return true;
1059 }
1060 return false;
1061 }
1062
1063 if (TREE_CODE (base) != MEM_REF
1064 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1065 || !integer_zerop (TREE_OPERAND (base, 1)))
1066 return false;
1067
1068 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1069 {
1070 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1071 index = ipa_get_param_decl_index_1 (descriptors, parm);
1072 }
1073 else
1074 {
1075 /* This branch catches situations where a pointer parameter is not a
1076 gimple register, for example:
1077
1078 void hip7(S*) (struct S * p)
1079 {
1080 void (*<T2e4>) (struct S *) D.1867;
1081 struct S * p.1;
1082
1083 <bb 2>:
1084 p.1_1 = p;
1085 D.1867_2 = p.1_1->f;
1086 D.1867_2 ();
1087 gdp = &p;
1088 */
1089
1090 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1091 index = load_from_unmodified_param (fbi, descriptors, def);
1092 }
1093
1094 if (index >= 0
1095 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1096 {
1097 *index_p = index;
1098 *by_ref_p = true;
1099 if (size_p)
1100 *size_p = size;
1101 return true;
1102 }
1103 return false;
1104 }
1105
1106 /* Just like the previous function, just without the func_body_info
1107 pointer, for users outside of this file. */
1108
1109 bool
1110 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1111 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1112 bool *by_ref_p)
1113 {
1114 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1115 offset_p, NULL, by_ref_p);
1116 }
1117
1118 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1119 of an assignment statement STMT, try to determine whether we are actually
1120 handling any of the following cases and construct an appropriate jump
1121 function into JFUNC if so:
1122
1123 1) The passed value is loaded from a formal parameter which is not a gimple
1124 register (most probably because it is addressable, the value has to be
1125 scalar) and we can guarantee the value has not changed. This case can
1126 therefore be described by a simple pass-through jump function. For example:
1127
1128 foo (int a)
1129 {
1130 int a.0;
1131
1132 a.0_2 = a;
1133 bar (a.0_2);
1134
1135 2) The passed value can be described by a simple arithmetic pass-through
1136 jump function. E.g.
1137
1138 foo (int a)
1139 {
1140 int D.2064;
1141
1142 D.2064_4 = a.1(D) + 4;
1143 bar (D.2064_4);
1144
1145 This case can also occur in combination with the previous one, e.g.:
1146
1147 foo (int a, int z)
1148 {
1149 int a.0;
1150 int D.2064;
1151
1152 a.0_3 = a;
1153 D.2064_4 = a.0_3 + 4;
1154 foo (D.2064_4);
1155
1156 3) The passed value is an address of an object within another one (which
1157 is also passed by reference). Such situations are described by an ancestor
1158 jump function, e.g.:
1159
1160 B::foo() (struct B * const this)
1161 {
1162 struct A * D.1845;
1163
1164 D.1845_2 = &this_1(D)->D.1748;
1165 A::bar (D.1845_2);
1166
1167 INFO is the structure describing individual parameters, accessed at
1168 different stages of IPA optimizations. FBI contains the information that is
1169 only needed for intraprocedural analysis. */
1170
1171 static void
1172 compute_complex_assign_jump_func (struct func_body_info *fbi,
1173 struct ipa_node_params *info,
1174 struct ipa_jump_func *jfunc,
1175 gimple call, gimple stmt, tree name,
1176 tree param_type)
1177 {
1178 HOST_WIDE_INT offset, size, max_size;
1179 tree op1, tc_ssa, base, ssa;
1180 int index;
1181
1182 op1 = gimple_assign_rhs1 (stmt);
1183
1184 if (TREE_CODE (op1) == SSA_NAME)
1185 {
1186 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1187 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1188 else
1189 index = load_from_unmodified_param (fbi, info->descriptors,
1190 SSA_NAME_DEF_STMT (op1));
1191 tc_ssa = op1;
1192 }
1193 else
1194 {
1195 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1196 tc_ssa = gimple_assign_lhs (stmt);
1197 }
1198
1199 if (index >= 0)
1200 {
1201 tree op2 = gimple_assign_rhs2 (stmt);
1202
1203 if (op2)
1204 {
1205 if (!is_gimple_ip_invariant (op2)
1206 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1207 && !useless_type_conversion_p (TREE_TYPE (name),
1208 TREE_TYPE (op1))))
1209 return;
1210
1211 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1212 gimple_assign_rhs_code (stmt));
1213 }
1214 else if (gimple_assign_single_p (stmt))
1215 {
1216 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1217 bool type_p = false;
1218
1219 if (param_type && POINTER_TYPE_P (param_type))
1220 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1221 call, jfunc);
1222 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1223 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1224 }
1225 return;
1226 }
1227
1228 if (TREE_CODE (op1) != ADDR_EXPR)
1229 return;
1230 op1 = TREE_OPERAND (op1, 0);
1231 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1232 return;
1233 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1234 if (TREE_CODE (base) != MEM_REF
1235 /* If this is a varying address, punt. */
1236 || max_size == -1
1237 || max_size != size)
1238 return;
1239 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1240 ssa = TREE_OPERAND (base, 0);
1241 if (TREE_CODE (ssa) != SSA_NAME
1242 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1243 || offset < 0)
1244 return;
1245
1246 /* Dynamic types are changed in constructors and destructors. */
1247 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1248 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1249 {
1250 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1251 call, jfunc, offset);
1252 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1253 ipa_set_ancestor_jf (jfunc, offset,
1254 type_p ? TREE_TYPE (param_type) : NULL, index,
1255 parm_ref_data_pass_through_p (fbi, index,
1256 call, ssa), type_p);
1257 }
1258 }
1259
1260 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1261 it looks like:
1262
1263 iftmp.1_3 = &obj_2(D)->D.1762;
1264
1265 The base of the MEM_REF must be a default definition SSA NAME of a
1266 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1267 whole MEM_REF expression is returned and the offset calculated from any
1268 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1269 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1270
1271 static tree
1272 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1273 {
1274 HOST_WIDE_INT size, max_size;
1275 tree expr, parm, obj;
1276
1277 if (!gimple_assign_single_p (assign))
1278 return NULL_TREE;
1279 expr = gimple_assign_rhs1 (assign);
1280
1281 if (TREE_CODE (expr) != ADDR_EXPR)
1282 return NULL_TREE;
1283 expr = TREE_OPERAND (expr, 0);
1284 obj = expr;
1285 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1286
1287 if (TREE_CODE (expr) != MEM_REF
1288 /* If this is a varying address, punt. */
1289 || max_size == -1
1290 || max_size != size
1291 || *offset < 0)
1292 return NULL_TREE;
1293 parm = TREE_OPERAND (expr, 0);
1294 if (TREE_CODE (parm) != SSA_NAME
1295 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1296 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1297 return NULL_TREE;
1298
1299 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1300 *obj_p = obj;
1301 return expr;
1302 }
1303
1304
1305 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1306 statement PHI, try to find out whether NAME is in fact a
1307 multiple-inheritance typecast from a descendant into an ancestor of a formal
1308 parameter and thus can be described by an ancestor jump function and if so,
1309 write the appropriate function into JFUNC.
1310
1311 Essentially we want to match the following pattern:
1312
1313 if (obj_2(D) != 0B)
1314 goto <bb 3>;
1315 else
1316 goto <bb 4>;
1317
1318 <bb 3>:
1319 iftmp.1_3 = &obj_2(D)->D.1762;
1320
1321 <bb 4>:
1322 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1323 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1324 return D.1879_6; */
1325
1326 static void
1327 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1328 struct ipa_node_params *info,
1329 struct ipa_jump_func *jfunc,
1330 gimple call, gimple phi, tree param_type)
1331 {
1332 HOST_WIDE_INT offset;
1333 gimple assign, cond;
1334 basic_block phi_bb, assign_bb, cond_bb;
1335 tree tmp, parm, expr, obj;
1336 int index, i;
1337
1338 if (gimple_phi_num_args (phi) != 2)
1339 return;
1340
1341 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1342 tmp = PHI_ARG_DEF (phi, 0);
1343 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1344 tmp = PHI_ARG_DEF (phi, 1);
1345 else
1346 return;
1347 if (TREE_CODE (tmp) != SSA_NAME
1348 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1349 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1350 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1351 return;
1352
1353 assign = SSA_NAME_DEF_STMT (tmp);
1354 assign_bb = gimple_bb (assign);
1355 if (!single_pred_p (assign_bb))
1356 return;
1357 expr = get_ancestor_addr_info (assign, &obj, &offset);
1358 if (!expr)
1359 return;
1360 parm = TREE_OPERAND (expr, 0);
1361 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1362 if (index < 0)
1363 return;
1364
1365 cond_bb = single_pred (assign_bb);
1366 cond = last_stmt (cond_bb);
1367 if (!cond
1368 || gimple_code (cond) != GIMPLE_COND
1369 || gimple_cond_code (cond) != NE_EXPR
1370 || gimple_cond_lhs (cond) != parm
1371 || !integer_zerop (gimple_cond_rhs (cond)))
1372 return;
1373
1374 phi_bb = gimple_bb (phi);
1375 for (i = 0; i < 2; i++)
1376 {
1377 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1378 if (pred != assign_bb && pred != cond_bb)
1379 return;
1380 }
1381
1382 bool type_p = false;
1383 if (param_type && POINTER_TYPE_P (param_type))
1384 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1385 call, jfunc, offset);
1386 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1387 ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
1388 index,
1389 parm_ref_data_pass_through_p (fbi, index, call, parm),
1390 type_p);
1391 }
1392
1393 /* Given OP which is passed as an actual argument to a called function,
1394 determine if it is possible to construct a KNOWN_TYPE jump function for it
1395 and if so, create one and store it to JFUNC.
1396 EXPECTED_TYPE represents a type the argument should be in */
1397
1398 static void
1399 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1400 gimple call, tree expected_type)
1401 {
1402 HOST_WIDE_INT offset, size, max_size;
1403 tree base;
1404
1405 if (!flag_devirtualize
1406 || TREE_CODE (op) != ADDR_EXPR
1407 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1408 /* Be sure expected_type is polymorphic. */
1409 || !expected_type
1410 || TREE_CODE (expected_type) != RECORD_TYPE
1411 || !TYPE_BINFO (expected_type)
1412 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
1413 return;
1414
1415 op = TREE_OPERAND (op, 0);
1416 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1417 if (!DECL_P (base)
1418 || max_size == -1
1419 || max_size != size
1420 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1421 || is_global_var (base))
1422 return;
1423
1424 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1425 return;
1426
1427 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1428 expected_type);
1429 }
1430
1431 /* Inspect the given TYPE and return true iff it has the same structure (the
1432 same number of fields of the same types) as a C++ member pointer. If
1433 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1434 corresponding fields there. */
1435
1436 static bool
1437 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1438 {
1439 tree fld;
1440
1441 if (TREE_CODE (type) != RECORD_TYPE)
1442 return false;
1443
1444 fld = TYPE_FIELDS (type);
1445 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1446 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1447 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1448 return false;
1449
1450 if (method_ptr)
1451 *method_ptr = fld;
1452
1453 fld = DECL_CHAIN (fld);
1454 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1455 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1456 return false;
1457 if (delta)
1458 *delta = fld;
1459
1460 if (DECL_CHAIN (fld))
1461 return false;
1462
1463 return true;
1464 }
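
/* For illustration (the layout is an assumption based on the Itanium C++
   ABI, it is not defined in this file), the kind of record the predicate
   above accepts corresponds to a C++ pointer to member function:

     struct S;
     struct hypothetical_member_ptr
     {
       void (S::*pfn) ();	// method pointer field
       ptrdiff_t delta;		// this-pointer adjustment field
     };  */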
1465
1466 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1467 return the rhs of its defining statement. Otherwise return RHS as it
1468 is. */
1469
1470 static inline tree
1471 get_ssa_def_if_simple_copy (tree rhs)
1472 {
1473 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1474 {
1475 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1476
1477 if (gimple_assign_single_p (def_stmt))
1478 rhs = gimple_assign_rhs1 (def_stmt);
1479 else
1480 break;
1481 }
1482 return rhs;
1483 }
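
/* For example (illustrative), if the IL contains

     a_2 = 7;
     b_3 = a_2;

   then get_ssa_def_if_simple_copy (b_3) returns the constant 7. */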
1484
1485 /* Simple linked list, describing known contents of an aggregate before a
1486 call. */
1487
1488 struct ipa_known_agg_contents_list
1489 {
1490 /* Offset and size of the described part of the aggregate. */
1491 HOST_WIDE_INT offset, size;
1492 /* Known constant value or NULL if the contents is known to be unknown. */
1493 tree constant;
1494 /* Pointer to the next structure in the list. */
1495 struct ipa_known_agg_contents_list *next;
1496 };
1497
1498 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1499 structures in which to put a new one with the given LHS_OFFSET and LHS_SIZE,
1500 unless there is a partial overlap, in which case return NULL, or such an
1501 element is already there, in which case set *ALREADY_THERE to true. */
1502
1503 static struct ipa_known_agg_contents_list **
1504 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1505 HOST_WIDE_INT lhs_offset,
1506 HOST_WIDE_INT lhs_size,
1507 bool *already_there)
1508 {
1509 struct ipa_known_agg_contents_list **p = list;
1510 while (*p && (*p)->offset < lhs_offset)
1511 {
1512 if ((*p)->offset + (*p)->size > lhs_offset)
1513 return NULL;
1514 p = &(*p)->next;
1515 }
1516
1517 if (*p && (*p)->offset < lhs_offset + lhs_size)
1518 {
1519 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1520 /* We already know this value is subsequently overwritten with
1521 something else. */
1522 *already_there = true;
1523 else
1524 /* Otherwise this is a partial overlap which we cannot
1525 represent. */
1526 return NULL;
1527 }
1528 return p;
1529 }
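
/* A minimal usage sketch (hypothetical offsets and helper name, not part of
the original file) of the contract above: an empty list always yields a
slot, a partial overlap yields NULL, and an exact repeat of an extent only
sets *ALREADY_THERE. */

static void ATTRIBUTE_UNUSED
example_agg_contents_list_use (void)
{
  struct ipa_known_agg_contents_list *list = NULL, **p, *n;
  bool already_there = false;

  /* Record a known store to bits [0, 64). */
  p = get_place_in_agg_contents_list (&list, 0, 64, &already_there);
  n = XALLOCA (struct ipa_known_agg_contents_list);
  n->offset = 0;
  n->size = 64;
  n->constant = NULL_TREE;
  n->next = *p;
  *p = n;

  /* Bits [32, 96) partially overlap the entry above, so no slot exists. */
  p = get_place_in_agg_contents_list (&list, 32, 64, &already_there);
  gcc_checking_assert (!p);

  /* Asking about the exact extent [0, 64) again flags ALREADY_THERE. */
  p = get_place_in_agg_contents_list (&list, 0, 64, &already_there);
  gcc_checking_assert (p && already_there);
}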
1530
1531 /* Build aggregate jump function from LIST, assuming there are exactly
1532 CONST_COUNT constant entries there and that the offset of the passed argument
1533 is ARG_OFFSET and store it into JFUNC. */
1534
1535 static void
1536 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1537 int const_count, HOST_WIDE_INT arg_offset,
1538 struct ipa_jump_func *jfunc)
1539 {
1540 vec_alloc (jfunc->agg.items, const_count);
1541 while (list)
1542 {
1543 if (list->constant)
1544 {
1545 struct ipa_agg_jf_item item;
1546 item.offset = list->offset - arg_offset;
1547 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1548 item.value = unshare_expr_without_location (list->constant);
1549 jfunc->agg.items->quick_push (item);
1550 }
1551 list = list->next;
1552 }
1553 }
1554
1555 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1556 in ARG is filled in with constant values. ARG can either be an aggregate
1557 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1558 aggregate. JFUNC is the jump function into which the constants are
1559 subsequently stored. */
1560
1561 static void
1562 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1563 struct ipa_jump_func *jfunc)
1564 {
1565 struct ipa_known_agg_contents_list *list = NULL;
1566 int item_count = 0, const_count = 0;
1567 HOST_WIDE_INT arg_offset, arg_size;
1568 gimple_stmt_iterator gsi;
1569 tree arg_base;
1570 bool check_ref, by_ref;
1571 ao_ref r;
1572
1573 /* The function operates in three stages. First, we prepare check_ref, r,
1574 arg_base and arg_offset based on what is actually passed as an
1575 argument. */
1576
1577 if (POINTER_TYPE_P (arg_type))
1578 {
1579 by_ref = true;
1580 if (TREE_CODE (arg) == SSA_NAME)
1581 {
1582 tree type_size;
1583 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1584 return;
1585 check_ref = true;
1586 arg_base = arg;
1587 arg_offset = 0;
1588 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1589 arg_size = tree_to_uhwi (type_size);
1590 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1591 }
1592 else if (TREE_CODE (arg) == ADDR_EXPR)
1593 {
1594 HOST_WIDE_INT arg_max_size;
1595
1596 arg = TREE_OPERAND (arg, 0);
1597 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1598 &arg_max_size);
1599 if (arg_max_size == -1
1600 || arg_max_size != arg_size
1601 || arg_offset < 0)
1602 return;
1603 if (DECL_P (arg_base))
1604 {
1605 check_ref = false;
1606 ao_ref_init (&r, arg_base);
1607 }
1608 else
1609 return;
1610 }
1611 else
1612 return;
1613 }
1614 else
1615 {
1616 HOST_WIDE_INT arg_max_size;
1617
1618 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1619
1620 by_ref = false;
1621 check_ref = false;
1622 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1623 &arg_max_size);
1624 if (arg_max_size == -1
1625 || arg_max_size != arg_size
1626 || arg_offset < 0)
1627 return;
1628
1629 ao_ref_init (&r, arg);
1630 }
1631
1632 /* Second stage walks back the BB, looks at individual statements and as long
1633 as it is confident of how the statements affect contents of the
1634 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1635 structures describing it. */
1636 gsi = gsi_for_stmt (call);
1637 gsi_prev (&gsi);
1638 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1639 {
1640 struct ipa_known_agg_contents_list *n, **p;
1641 gimple stmt = gsi_stmt (gsi);
1642 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1643 tree lhs, rhs, lhs_base;
1644
1645 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1646 continue;
1647 if (!gimple_assign_single_p (stmt))
1648 break;
1649
1650 lhs = gimple_assign_lhs (stmt);
1651 rhs = gimple_assign_rhs1 (stmt);
1652 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1653 || TREE_CODE (lhs) == BIT_FIELD_REF
1654 || contains_bitfld_component_ref_p (lhs))
1655 break;
1656
1657 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1658 &lhs_max_size);
1659 if (lhs_max_size == -1
1660 || lhs_max_size != lhs_size)
1661 break;
1662
1663 if (check_ref)
1664 {
1665 if (TREE_CODE (lhs_base) != MEM_REF
1666 || TREE_OPERAND (lhs_base, 0) != arg_base
1667 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1668 break;
1669 }
1670 else if (lhs_base != arg_base)
1671 {
1672 if (DECL_P (lhs_base))
1673 continue;
1674 else
1675 break;
1676 }
1677
1678 bool already_there = false;
1679 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1680 &already_there);
1681 if (!p)
1682 break;
1683 if (already_there)
1684 continue;
1685
1686 rhs = get_ssa_def_if_simple_copy (rhs);
1687 n = XALLOCA (struct ipa_known_agg_contents_list);
1688 n->size = lhs_size;
1689 n->offset = lhs_offset;
1690 if (is_gimple_ip_invariant (rhs))
1691 {
1692 n->constant = rhs;
1693 const_count++;
1694 }
1695 else
1696 n->constant = NULL_TREE;
1697 n->next = *p;
1698 *p = n;
1699
1700 item_count++;
1701 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1702 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1703 break;
1704 }
1705
1706 /* Third stage just goes over the list and creates an appropriate vector of
1707 ipa_agg_jf_item structures out of it, of course only if there are
1708 any known constants to begin with. */
1709
1710 if (const_count)
1711 {
1712 jfunc->agg.by_ref = by_ref;
1713 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1714 }
1715 }
1716
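/* Return the declared type of the Ith formal parameter of the callee of call
   graph edge E, as determined from the callee's TYPE_ARG_TYPES list or,
   failing that, from its DECL_ARGUMENTS chain. Return NULL if the type
   cannot be determined. */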
1717 static tree
1718 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1719 {
1720 int n;
1721 tree type = (e->callee
1722 ? TREE_TYPE (e->callee->decl)
1723 : gimple_call_fntype (e->call_stmt));
1724 tree t = TYPE_ARG_TYPES (type);
1725
1726 for (n = 0; n < i; n++)
1727 {
1728 if (!t)
1729 break;
1730 t = TREE_CHAIN (t);
1731 }
1732 if (t)
1733 return TREE_VALUE (t);
1734 if (!e->callee)
1735 return NULL;
1736 t = DECL_ARGUMENTS (e->callee->decl);
1737 for (n = 0; n < i; n++)
1738 {
1739 if (!t)
1740 return NULL;
1741 t = TREE_CHAIN (t);
1742 }
1743 if (t)
1744 return TREE_TYPE (t);
1745 return NULL;
1746 }
1747
1748 /* Compute jump function for all arguments of callsite CS and insert the
1749 information in the jump_functions array in the ipa_edge_args corresponding
1750 to this callsite. */
1751
1752 static void
1753 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1754 struct cgraph_edge *cs)
1755 {
1756 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1757 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1758 gimple call = cs->call_stmt;
1759 int n, arg_num = gimple_call_num_args (call);
1760
1761 if (arg_num == 0 || args->jump_functions)
1762 return;
1763 vec_safe_grow_cleared (args->jump_functions, arg_num);
1764
1765 if (gimple_call_internal_p (call))
1766 return;
1767 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1768 return;
1769
1770 for (n = 0; n < arg_num; n++)
1771 {
1772 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1773 tree arg = gimple_call_arg (call, n);
1774 tree param_type = ipa_get_callee_param_type (cs, n);
1775
1776 if (is_gimple_ip_invariant (arg))
1777 ipa_set_jf_constant (jfunc, arg, cs);
1778 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1779 && TREE_CODE (arg) == PARM_DECL)
1780 {
1781 int index = ipa_get_param_decl_index (info, arg);
1782
1783 gcc_assert (index >= 0);
1784 /* Aggregate passed by value, check for pass-through, otherwise we
1785 will attempt to fill in aggregate contents later in this
1786 for cycle. */
1787 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1788 {
1789 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1790 continue;
1791 }
1792 }
1793 else if (TREE_CODE (arg) == SSA_NAME)
1794 {
1795 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1796 {
1797 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1798 if (index >= 0)
1799 {
1800 bool agg_p, type_p;
1801 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1802 if (param_type && POINTER_TYPE_P (param_type))
1803 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1804 call, jfunc);
1805 else
1806 type_p = false;
1807 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1808 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1809 type_p);
1810 }
1811 }
1812 else
1813 {
1814 gimple stmt = SSA_NAME_DEF_STMT (arg);
1815 if (is_gimple_assign (stmt))
1816 compute_complex_assign_jump_func (fbi, info, jfunc,
1817 call, stmt, arg, param_type);
1818 else if (gimple_code (stmt) == GIMPLE_PHI)
1819 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1820 call, stmt, param_type);
1821 }
1822 }
1823 else
1824 compute_known_type_jump_func (arg, jfunc, call,
1825 param_type
1826 && POINTER_TYPE_P (param_type)
1827 ? TREE_TYPE (param_type)
1828 : NULL);
1829
1830 /* If ARG is a pointer, we cannot use its type to determine the type of the
1831 aggregate passed (because type conversions are ignored in gimple).  Usually
1832 we can safely get the type from the function declaration, but for K&R
1833 prototypes or variadic functions we can try our luck with the type of the
1834 pointer passed.  TODO: Since we look for actual initialization of the memory
1835 object, we might better work out the type based on the memory stores we find.  */
1836 if (!param_type)
1837 param_type = TREE_TYPE (arg);
1838
1839 if ((jfunc->type != IPA_JF_PASS_THROUGH
1840 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1841 && (jfunc->type != IPA_JF_ANCESTOR
1842 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1843 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1844 || POINTER_TYPE_P (param_type)))
1845 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1846 }
1847 }
1848
1849 /* Compute jump functions for all edges - both direct and indirect - outgoing
1850 from BB. */
1851
1852 static void
1853 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1854 {
1855 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1856 int i;
1857 struct cgraph_edge *cs;
1858
1859 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1860 {
1861 struct cgraph_node *callee = cs->callee;
1862
1863 if (callee)
1864 {
1865 cgraph_function_or_thunk_node (callee, NULL);
1866 /* We do not need to bother analyzing calls to unknown functions
1867 unless they may become known during LTO/WHOPR.  */
1868 if (!callee->definition && !flag_lto)
1869 continue;
1870 }
1871 ipa_compute_jump_functions_for_edge (fbi, cs);
1872 }
1873 }
1874
1875 /* If STMT looks like a statement loading a value from a member pointer formal
1876 parameter, return that parameter and store the offset of the field to
1877 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1878 might be clobbered). If USE_DELTA, then we look for a use of the delta
1879 field rather than the pfn. */
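/* For instance, with the usual two-field member pointer layout this matches
   loads such as the following (taken from the example dump further below):

     f$__pfn_24 = f.__pfn;
   or
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   returning the PARM_DECL f in both cases.  */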
1880
1881 static tree
1882 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1883 HOST_WIDE_INT *offset_p)
1884 {
1885 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1886
1887 if (!gimple_assign_single_p (stmt))
1888 return NULL_TREE;
1889
1890 rhs = gimple_assign_rhs1 (stmt);
1891 if (TREE_CODE (rhs) == COMPONENT_REF)
1892 {
1893 ref_field = TREE_OPERAND (rhs, 1);
1894 rhs = TREE_OPERAND (rhs, 0);
1895 }
1896 else
1897 ref_field = NULL_TREE;
1898 if (TREE_CODE (rhs) != MEM_REF)
1899 return NULL_TREE;
1900 rec = TREE_OPERAND (rhs, 0);
1901 if (TREE_CODE (rec) != ADDR_EXPR)
1902 return NULL_TREE;
1903 rec = TREE_OPERAND (rec, 0);
1904 if (TREE_CODE (rec) != PARM_DECL
1905 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1906 return NULL_TREE;
1907 ref_offset = TREE_OPERAND (rhs, 1);
1908
1909 if (use_delta)
1910 fld = delta_field;
1911 else
1912 fld = ptr_field;
1913 if (offset_p)
1914 *offset_p = int_bit_position (fld);
1915
1916 if (ref_field)
1917 {
1918 if (integer_nonzerop (ref_offset))
1919 return NULL_TREE;
1920 return ref_field == fld ? rec : NULL_TREE;
1921 }
1922 else
1923 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1924 : NULL_TREE;
1925 }
1926
1927 /* Returns true iff T is an SSA_NAME defined by a statement. */
1928
1929 static bool
1930 ipa_is_ssa_with_stmt_def (tree t)
1931 {
1932 if (TREE_CODE (t) == SSA_NAME
1933 && !SSA_NAME_IS_DEFAULT_DEF (t))
1934 return true;
1935 else
1936 return false;
1937 }
1938
1939 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1940 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1941 indirect call graph edge. */
1942
1943 static struct cgraph_edge *
1944 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1945 {
1946 struct cgraph_edge *cs;
1947
1948 cs = cgraph_edge (node, stmt);
1949 cs->indirect_info->param_index = param_index;
1950 cs->indirect_info->agg_contents = 0;
1951 cs->indirect_info->member_ptr = 0;
1952 return cs;
1953 }
1954
1955 /* Analyze the CALL and examine uses of formal parameters of the caller
1956 FBI->node (described by FBI->info; intermediate aliasing information about
1957 each formal parameter is kept in FBI->bb_infos).  Currently it checks
1958 whether the call calls a pointer that is a formal parameter and if so, the
1959 parameter is marked with the called flag and an indirect call graph edge
1960 describing the call is created. This is very simple for ordinary pointers
1961 represented in SSA but not-so-nice when it comes to member pointers. The
1962 ugly part of this function does nothing more than trying to match the
1963 pattern of such a call. An example of such a pattern is the gimple dump
1964 below, the call is on the last line:
1965
1966 <bb 2>:
1967 f$__delta_5 = f.__delta;
1968 f$__pfn_24 = f.__pfn;
1969
1970 or
1971 <bb 2>:
1972 f$__delta_5 = MEM[(struct *)&f];
1973 f$__pfn_24 = MEM[(struct *)&f + 4B];
1974
1975 and a few lines below:
1976
1977 <bb 5>
1978 D.2496_3 = (int) f$__pfn_24;
1979 D.2497_4 = D.2496_3 & 1;
1980 if (D.2497_4 != 0)
1981 goto <bb 3>;
1982 else
1983 goto <bb 4>;
1984
1985 <bb 6>:
1986 D.2500_7 = (unsigned int) f$__delta_5;
1987 D.2501_8 = &S + D.2500_7;
1988 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1989 D.2503_10 = *D.2502_9;
1990 D.2504_12 = f$__pfn_24 + -1;
1991 D.2505_13 = (unsigned int) D.2504_12;
1992 D.2506_14 = D.2503_10 + D.2505_13;
1993 D.2507_15 = *D.2506_14;
1994 iftmp.11_16 = (String:: *) D.2507_15;
1995
1996 <bb 7>:
1997 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1998 D.2500_19 = (unsigned int) f$__delta_5;
1999 D.2508_20 = &S + D.2500_19;
2000 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2001
2002 Such patterns are results of simple calls to a member pointer:
2003
2004 int doprinting (int (MyString::* f)(int) const)
2005 {
2006 MyString S ("somestring");
2007
2008 return (S.*f)(4);
2009 }
2010
2011 Moreover, the function also looks for called pointers loaded from aggregates
2012 passed by value or reference. */
2013
2014 static void
2015 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2016 tree target)
2017 {
2018 struct ipa_node_params *info = fbi->info;
2019 HOST_WIDE_INT offset;
2020 bool by_ref;
2021
2022 if (SSA_NAME_IS_DEFAULT_DEF (target))
2023 {
2024 tree var = SSA_NAME_VAR (target);
2025 int index = ipa_get_param_decl_index (info, var);
2026 if (index >= 0)
2027 ipa_note_param_call (fbi->node, index, call);
2028 return;
2029 }
2030
2031 int index;
2032 gimple def = SSA_NAME_DEF_STMT (target);
2033 if (gimple_assign_single_p (def)
2034 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2035 gimple_assign_rhs1 (def), &index, &offset,
2036 NULL, &by_ref))
2037 {
2038 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2039 if (cs->indirect_info->offset != offset)
2040 cs->indirect_info->outer_type = NULL;
2041 cs->indirect_info->offset = offset;
2042 cs->indirect_info->agg_contents = 1;
2043 cs->indirect_info->by_ref = by_ref;
2044 return;
2045 }
2046
2047 /* Now we need to try to match the complex pattern of calling a member
2048 pointer. */
2049 if (gimple_code (def) != GIMPLE_PHI
2050 || gimple_phi_num_args (def) != 2
2051 || !POINTER_TYPE_P (TREE_TYPE (target))
2052 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2053 return;
2054
2055 /* First, we need to check whether one of these is a load from a member
2056 pointer that is a parameter to this function. */
2057 tree n1 = PHI_ARG_DEF (def, 0);
2058 tree n2 = PHI_ARG_DEF (def, 1);
2059 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2060 return;
2061 gimple d1 = SSA_NAME_DEF_STMT (n1);
2062 gimple d2 = SSA_NAME_DEF_STMT (n2);
2063
2064 tree rec;
2065 basic_block bb, virt_bb;
2066 basic_block join = gimple_bb (def);
2067 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2068 {
2069 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2070 return;
2071
2072 bb = EDGE_PRED (join, 0)->src;
2073 virt_bb = gimple_bb (d2);
2074 }
2075 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2076 {
2077 bb = EDGE_PRED (join, 1)->src;
2078 virt_bb = gimple_bb (d1);
2079 }
2080 else
2081 return;
2082
2083 /* Second, we need to check that the basic blocks are laid out in the way
2084 corresponding to the pattern. */
2085
2086 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2087 || single_pred (virt_bb) != bb
2088 || single_succ (virt_bb) != join)
2089 return;
2090
2091 /* Third, let's see that the branching is done depending on the least
2092 significant bit of the pfn. */
2093
2094 gimple branch = last_stmt (bb);
2095 if (!branch || gimple_code (branch) != GIMPLE_COND)
2096 return;
2097
2098 if ((gimple_cond_code (branch) != NE_EXPR
2099 && gimple_cond_code (branch) != EQ_EXPR)
2100 || !integer_zerop (gimple_cond_rhs (branch)))
2101 return;
2102
2103 tree cond = gimple_cond_lhs (branch);
2104 if (!ipa_is_ssa_with_stmt_def (cond))
2105 return;
2106
2107 def = SSA_NAME_DEF_STMT (cond);
2108 if (!is_gimple_assign (def)
2109 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2110 || !integer_onep (gimple_assign_rhs2 (def)))
2111 return;
2112
2113 cond = gimple_assign_rhs1 (def);
2114 if (!ipa_is_ssa_with_stmt_def (cond))
2115 return;
2116
2117 def = SSA_NAME_DEF_STMT (cond);
2118
2119 if (is_gimple_assign (def)
2120 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2121 {
2122 cond = gimple_assign_rhs1 (def);
2123 if (!ipa_is_ssa_with_stmt_def (cond))
2124 return;
2125 def = SSA_NAME_DEF_STMT (cond);
2126 }
2127
2128 tree rec2;
2129 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2130 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2131 == ptrmemfunc_vbit_in_delta),
2132 NULL);
2133 if (rec != rec2)
2134 return;
2135
2136 index = ipa_get_param_decl_index (info, rec);
2137 if (index >= 0
2138 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2139 {
2140 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2141 if (cs->indirect_info->offset != offset)
2142 cs->indirect_info->outer_type = NULL;
2143 cs->indirect_info->offset = offset;
2144 cs->indirect_info->agg_contents = 1;
2145 cs->indirect_info->member_ptr = 1;
2146 }
2147
2148 return;
2149 }
2150
2151 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and, if the
2152 object referenced in the expression is a formal parameter of the caller
2153 FBI->node (described by FBI->info), create a call note for the
2154 statement. */
2155
2156 static void
2157 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2158 gimple call, tree target)
2159 {
2160 tree obj = OBJ_TYPE_REF_OBJECT (target);
2161 int index;
2162 HOST_WIDE_INT anc_offset;
2163
2164 if (!flag_devirtualize)
2165 return;
2166
2167 if (TREE_CODE (obj) != SSA_NAME)
2168 return;
2169
2170 struct ipa_node_params *info = fbi->info;
2171 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2172 {
2173 struct ipa_jump_func jfunc;
2174 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2175 return;
2176
2177 anc_offset = 0;
2178 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2179 gcc_assert (index >= 0);
2180 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2181 call, &jfunc))
2182 return;
2183 }
2184 else
2185 {
2186 struct ipa_jump_func jfunc;
2187 gimple stmt = SSA_NAME_DEF_STMT (obj);
2188 tree expr;
2189
2190 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2191 if (!expr)
2192 return;
2193 index = ipa_get_param_decl_index (info,
2194 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2195 gcc_assert (index >= 0);
2196 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2197 call, &jfunc, anc_offset))
2198 return;
2199 }
2200
2201 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2202 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2203 ii->offset = anc_offset;
2204 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2205 ii->otr_type = obj_type_ref_class (target);
2206 ii->polymorphic = 1;
2207 }
2208
2209 /* Analyze call statement CALL to determine whether and how it utilizes formal
2210 parameters of the caller FBI->node (described by FBI->info, which also
2211 holds intermediate information about each formal parameter).  */
2212
2213 static void
2214 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2215 {
2216 tree target = gimple_call_fn (call);
2217
2218 if (!target
2219 || (TREE_CODE (target) != SSA_NAME
2220 && !virtual_method_call_p (target)))
2221 return;
2222
2223 /* If we previously turned the call into a direct call, there is
2224 no need to analyze. */
2225 struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
2226 if (cs && !cs->indirect_unknown_callee)
2227 return;
2228 if (TREE_CODE (target) == SSA_NAME)
2229 ipa_analyze_indirect_call_uses (fbi, call, target);
2230 else if (virtual_method_call_p (target))
2231 ipa_analyze_virtual_call_uses (fbi, call, target);
2232 }
2233
2234
2235 /* Analyze the call statement STMT with respect to formal parameters (described
2236 in FBI->info) of the caller given by FBI->node.  Currently it only checks whether
2237 formal parameters are called. */
2238
2239 static void
2240 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2241 {
2242 if (is_gimple_call (stmt))
2243 ipa_analyze_call_uses (fbi, stmt);
2244 }
2245
2246 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2247 If OP is a parameter declaration, mark it as used in the info structure
2248 passed in DATA. */
2249
2250 static bool
2251 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2252 {
2253 struct ipa_node_params *info = (struct ipa_node_params *) data;
2254
2255 op = get_base_address (op);
2256 if (op
2257 && TREE_CODE (op) == PARM_DECL)
2258 {
2259 int index = ipa_get_param_decl_index (info, op);
2260 gcc_assert (index >= 0);
2261 ipa_set_param_used (info, index, true);
2262 }
2263
2264 return false;
2265 }
2266
2267 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2268 the findings in various structures of the associated ipa_node_params
2269 structure, such as parameter flags, notes etc. FBI holds various data about
2270 the function being analyzed. */
2271
2272 static void
2273 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2274 {
2275 gimple_stmt_iterator gsi;
2276 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2277 {
2278 gimple stmt = gsi_stmt (gsi);
2279
2280 if (is_gimple_debug (stmt))
2281 continue;
2282
2283 ipa_analyze_stmt_uses (fbi, stmt);
2284 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2285 visit_ref_for_mod_analysis,
2286 visit_ref_for_mod_analysis,
2287 visit_ref_for_mod_analysis);
2288 }
2289 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2290 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2291 visit_ref_for_mod_analysis,
2292 visit_ref_for_mod_analysis,
2293 visit_ref_for_mod_analysis);
2294 }
2295
2296 /* Calculate controlled uses of parameters of NODE. */
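/* A use is "controlled" when it is an argument of a call statement, so the
   count tells how many times the value escapes only into calls; any other
   non-debug use makes the count meaningless and is recorded as
   IPA_UNDESCRIBED_USE.  This is what allows, for example, removing a
   reference to a function passed as a constant argument once all such call
   sites are accounted for.  */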
2297
2298 static void
2299 ipa_analyze_controlled_uses (struct cgraph_node *node)
2300 {
2301 struct ipa_node_params *info = IPA_NODE_REF (node);
2302
2303 for (int i = 0; i < ipa_get_param_count (info); i++)
2304 {
2305 tree parm = ipa_get_param (info, i);
2306 int controlled_uses = 0;
2307
2308 /* For SSA regs see if parameter is used. For non-SSA we compute
2309 the flag during modification analysis. */
2310 if (is_gimple_reg (parm))
2311 {
2312 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2313 parm);
2314 if (ddef && !has_zero_uses (ddef))
2315 {
2316 imm_use_iterator imm_iter;
2317 use_operand_p use_p;
2318
2319 ipa_set_param_used (info, i, true);
2320 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2321 if (!is_gimple_call (USE_STMT (use_p)))
2322 {
2323 if (!is_gimple_debug (USE_STMT (use_p)))
2324 {
2325 controlled_uses = IPA_UNDESCRIBED_USE;
2326 break;
2327 }
2328 }
2329 else
2330 controlled_uses++;
2331 }
2332 else
2333 controlled_uses = 0;
2334 }
2335 else
2336 controlled_uses = IPA_UNDESCRIBED_USE;
2337 ipa_set_controlled_uses (info, i, controlled_uses);
2338 }
2339 }
2340
2341 /* Free the vectors held by BI.  */
2342
2343 static void
2344 free_ipa_bb_info (struct ipa_bb_info *bi)
2345 {
2346 bi->cg_edges.release ();
2347 bi->param_aa_statuses.release ();
2348 }
2349
2350 /* Dominator walker driving the analysis. */
2351
2352 class analysis_dom_walker : public dom_walker
2353 {
2354 public:
2355 analysis_dom_walker (struct func_body_info *fbi)
2356 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2357
2358 virtual void before_dom_children (basic_block);
2359
2360 private:
2361 struct func_body_info *m_fbi;
2362 };
2363
2364 void
2365 analysis_dom_walker::before_dom_children (basic_block bb)
2366 {
2367 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2368 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2369 }
2370
2371 /* Initialize the array describing properties of formal parameters
2372 of NODE, analyze their uses and compute jump functions associated
2373 with actual arguments of calls from within NODE. */
2374
2375 void
2376 ipa_analyze_node (struct cgraph_node *node)
2377 {
2378 struct func_body_info fbi;
2379 struct ipa_node_params *info;
2380
2381 ipa_check_create_node_params ();
2382 ipa_check_create_edge_args ();
2383 info = IPA_NODE_REF (node);
2384
2385 if (info->analysis_done)
2386 return;
2387 info->analysis_done = 1;
2388
2389 if (ipa_func_spec_opts_forbid_analysis_p (node))
2390 {
2391 for (int i = 0; i < ipa_get_param_count (info); i++)
2392 {
2393 ipa_set_param_used (info, i, true);
2394 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2395 }
2396 return;
2397 }
2398
2399 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2400 push_cfun (func);
2401 calculate_dominance_info (CDI_DOMINATORS);
2402 ipa_initialize_node_params (node);
2403 ipa_analyze_controlled_uses (node);
2404
2405 fbi.node = node;
2406 fbi.info = IPA_NODE_REF (node);
2407 fbi.bb_infos = vNULL;
2408 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2409 fbi.param_count = ipa_get_param_count (info);
2410 fbi.aa_walked = 0;
2411
2412 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2413 {
2414 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2415 bi->cg_edges.safe_push (cs);
2416 }
2417
2418 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2419 {
2420 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2421 bi->cg_edges.safe_push (cs);
2422 }
2423
2424 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2425
2426 int i;
2427 struct ipa_bb_info *bi;
2428 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2429 free_ipa_bb_info (bi);
2430 fbi.bb_infos.release ();
2431 free_dominance_info (CDI_DOMINATORS);
2432 pop_cfun ();
2433 }
2434
2435 /* Given a statement CALL, which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2436 attempt a type-based devirtualization.  If successful, return the
2437 target function declaration, otherwise return NULL. */
2438
2439 tree
2440 ipa_intraprocedural_devirtualization (gimple call)
2441 {
2442 tree binfo, token, fndecl;
2443 struct ipa_jump_func jfunc;
2444 tree otr = gimple_call_fn (call);
2445
2446 jfunc.type = IPA_JF_UNKNOWN;
2447 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2448 call, obj_type_ref_class (otr));
2449 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2450 return NULL_TREE;
2451 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2452 if (!binfo)
2453 return NULL_TREE;
2454 token = OBJ_TYPE_REF_TOKEN (otr);
2455 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2456 binfo);
2457 #ifdef ENABLE_CHECKING
2458 if (fndecl)
2459 gcc_assert (possible_polymorphic_call_target_p
2460 (otr, cgraph_get_node (fndecl)));
2461 #endif
2462 return fndecl;
2463 }
2464
2465 /* Update the jump function DST when the call graph edge corresponding to SRC
2466 is being inlined, knowing that DST is of type ancestor and SRC of known
2467 type.  */
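/* For instance, if SRC describes a known object at offset 16 and DST is an
   ancestor jump function adding offset 8, the result is a known-type jump
   function at offset 24, keeping SRC's base type and taking DST's ancestor
   type as the component type.  */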
2468
2469 static void
2470 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2471 struct ipa_jump_func *dst)
2472 {
2473 HOST_WIDE_INT combined_offset;
2474 tree combined_type;
2475
2476 if (!ipa_get_jf_ancestor_type_preserved (dst))
2477 {
2478 dst->type = IPA_JF_UNKNOWN;
2479 return;
2480 }
2481
2482 combined_offset = ipa_get_jf_known_type_offset (src)
2483 + ipa_get_jf_ancestor_offset (dst);
2484 combined_type = ipa_get_jf_ancestor_type (dst);
2485
2486 ipa_set_jf_known_type (dst, combined_offset,
2487 ipa_get_jf_known_type_base_type (src),
2488 combined_type);
2489 }
2490
2491 /* Update the jump functions associated with call graph edge E when the call
2492 graph edge CS is being inlined, assuming that E->caller is already (possibly
2493 indirectly) inlined into CS->callee and that E has not been inlined. */
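/* For example, if DST is an ancestor jump function adding offset 8 to the
   caller's formal 0, and the jump function at CS for argument 0 is itself an
   ancestor jump function adding offset 16 to formal 2, the updated DST becomes
   an ancestor jump function adding offset 24 to formal 2 (see the ancestor
   combination cases below, where offsets add up and formal ids are
   rewritten).  */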
2494
2495 static void
2496 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2497 struct cgraph_edge *e)
2498 {
2499 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2500 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2501 int count = ipa_get_cs_argument_count (args);
2502 int i;
2503
2504 for (i = 0; i < count; i++)
2505 {
2506 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2507
2508 if (dst->type == IPA_JF_ANCESTOR)
2509 {
2510 struct ipa_jump_func *src;
2511 int dst_fid = dst->value.ancestor.formal_id;
2512
2513 /* Variable number of arguments can cause havoc if we try to access
2514 one that does not exist in the inlined edge. So make sure we
2515 don't. */
2516 if (dst_fid >= ipa_get_cs_argument_count (top))
2517 {
2518 dst->type = IPA_JF_UNKNOWN;
2519 continue;
2520 }
2521
2522 src = ipa_get_ith_jump_func (top, dst_fid);
2523
2524 if (src->agg.items
2525 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2526 {
2527 struct ipa_agg_jf_item *item;
2528 int j;
2529
2530 /* Currently we do not produce clobber aggregate jump functions,
2531 replace with merging when we do. */
2532 gcc_assert (!dst->agg.items);
2533
2534 dst->agg.items = vec_safe_copy (src->agg.items);
2535 dst->agg.by_ref = src->agg.by_ref;
2536 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2537 item->offset -= dst->value.ancestor.offset;
2538 }
2539
2540 if (src->type == IPA_JF_KNOWN_TYPE)
2541 combine_known_type_and_ancestor_jfs (src, dst);
2542 else if (src->type == IPA_JF_PASS_THROUGH
2543 && src->value.pass_through.operation == NOP_EXPR)
2544 {
2545 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2546 dst->value.ancestor.agg_preserved &=
2547 src->value.pass_through.agg_preserved;
2548 dst->value.ancestor.type_preserved &=
2549 src->value.pass_through.type_preserved;
2550 }
2551 else if (src->type == IPA_JF_ANCESTOR)
2552 {
2553 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2554 dst->value.ancestor.offset += src->value.ancestor.offset;
2555 dst->value.ancestor.agg_preserved &=
2556 src->value.ancestor.agg_preserved;
2557 dst->value.ancestor.type_preserved &=
2558 src->value.ancestor.type_preserved;
2559 }
2560 else
2561 dst->type = IPA_JF_UNKNOWN;
2562 }
2563 else if (dst->type == IPA_JF_PASS_THROUGH)
2564 {
2565 struct ipa_jump_func *src;
2566 /* We must check range due to calls with variable number of arguments
2567 and we cannot combine jump functions with operations. */
2568 if (dst->value.pass_through.operation == NOP_EXPR
2569 && (dst->value.pass_through.formal_id
2570 < ipa_get_cs_argument_count (top)))
2571 {
2572 int dst_fid = dst->value.pass_through.formal_id;
2573 src = ipa_get_ith_jump_func (top, dst_fid);
2574 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2575
2576 switch (src->type)
2577 {
2578 case IPA_JF_UNKNOWN:
2579 dst->type = IPA_JF_UNKNOWN;
2580 break;
2581 case IPA_JF_KNOWN_TYPE:
2582 if (ipa_get_jf_pass_through_type_preserved (dst))
2583 ipa_set_jf_known_type (dst,
2584 ipa_get_jf_known_type_offset (src),
2585 ipa_get_jf_known_type_base_type (src),
2586 ipa_get_jf_known_type_component_type (src));
2587 else
2588 dst->type = IPA_JF_UNKNOWN;
2589 break;
2590 case IPA_JF_CONST:
2591 ipa_set_jf_cst_copy (dst, src);
2592 break;
2593
2594 case IPA_JF_PASS_THROUGH:
2595 {
2596 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2597 enum tree_code operation;
2598 operation = ipa_get_jf_pass_through_operation (src);
2599
2600 if (operation == NOP_EXPR)
2601 {
2602 bool agg_p, type_p;
2603 agg_p = dst_agg_p
2604 && ipa_get_jf_pass_through_agg_preserved (src);
2605 type_p = ipa_get_jf_pass_through_type_preserved (src)
2606 && ipa_get_jf_pass_through_type_preserved (dst);
2607 ipa_set_jf_simple_pass_through (dst, formal_id,
2608 agg_p, type_p);
2609 }
2610 else
2611 {
2612 tree operand = ipa_get_jf_pass_through_operand (src);
2613 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2614 operation);
2615 }
2616 break;
2617 }
2618 case IPA_JF_ANCESTOR:
2619 {
2620 bool agg_p, type_p;
2621 agg_p = dst_agg_p
2622 && ipa_get_jf_ancestor_agg_preserved (src);
2623 type_p = ipa_get_jf_ancestor_type_preserved (src)
2624 && ipa_get_jf_pass_through_type_preserved (dst);
2625 ipa_set_ancestor_jf (dst,
2626 ipa_get_jf_ancestor_offset (src),
2627 ipa_get_jf_ancestor_type (src),
2628 ipa_get_jf_ancestor_formal_id (src),
2629 agg_p, type_p);
2630 break;
2631 }
2632 default:
2633 gcc_unreachable ();
2634 }
2635
2636 if (src->agg.items
2637 && (dst_agg_p || !src->agg.by_ref))
2638 {
2639 /* Currently we do not produce clobber aggregate jump
2640 functions, replace with merging when we do. */
2641 gcc_assert (!dst->agg.items);
2642
2643 dst->agg.by_ref = src->agg.by_ref;
2644 dst->agg.items = vec_safe_copy (src->agg.items);
2645 }
2646 }
2647 else
2648 dst->type = IPA_JF_UNKNOWN;
2649 }
2650 }
2651 }
2652
2653 /* If TARGET is an ADDR_EXPR of a function declaration, make it the destination
2654 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2655
2656 struct cgraph_edge *
2657 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2658 {
2659 struct cgraph_node *callee;
2660 struct inline_edge_summary *es = inline_edge_summary (ie);
2661 bool unreachable = false;
2662
2663 if (TREE_CODE (target) == ADDR_EXPR)
2664 target = TREE_OPERAND (target, 0);
2665 if (TREE_CODE (target) != FUNCTION_DECL)
2666 {
2667 target = canonicalize_constructor_val (target, NULL);
2668 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2669 {
2670 if (ie->indirect_info->member_ptr)
2671 /* Member pointer call that goes through a VMT lookup. */
2672 return NULL;
2673
2674 if (dump_enabled_p ())
2675 {
2676 location_t loc = gimple_location_safe (ie->call_stmt);
2677 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2678 "discovered direct call to non-function in %s/%i, "
2679 "making it __builtin_unreachable\n",
2680 ie->caller->name (), ie->caller->order);
2681 }
2682
2683 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2684 callee = cgraph_get_create_node (target);
2685 unreachable = true;
2686 }
2687 else
2688 callee = cgraph_get_node (target);
2689 }
2690 else
2691 callee = cgraph_get_node (target);
2692
2693 /* Because may-edges are not explicitly represented and the vtable may be external,
2694 we may create the first reference to the object in the unit. */
2695 if (!callee || callee->global.inlined_to)
2696 {
2697
2698 /* We had better make sure we can refer to it.
2699 In the case of static functions we are out of luck, since we have
2700 already removed their bodies.  In the case of public functions we may
2701 or may not introduce the reference.  */
2702 if (!canonicalize_constructor_val (target, NULL)
2703 || !TREE_PUBLIC (target))
2704 {
2705 if (dump_file)
2706 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2707 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2708 xstrdup (ie->caller->name ()),
2709 ie->caller->order,
2710 xstrdup (ie->callee->name ()),
2711 ie->callee->order);
2712 return NULL;
2713 }
2714 callee = cgraph_get_create_node (target);
2715 }
2716
2717 if (!dbg_cnt (devirt))
2718 return NULL;
2719
2720 ipa_check_create_node_params ();
2721
2722 /* We cannot make edges to inline clones.  It is a bug if someone removed
2723 the cgraph node too early.  */
2724 gcc_assert (!callee->global.inlined_to);
2725
2726 if (dump_file && !unreachable)
2727 {
2728 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2729 "(%s/%i -> %s/%i), for stmt ",
2730 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2731 xstrdup (ie->caller->name ()),
2732 ie->caller->order,
2733 xstrdup (callee->name ()),
2734 callee->order);
2735 if (ie->call_stmt)
2736 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2737 else
2738 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2739 }
2740 if (dump_enabled_p ())
2741 {
2742 location_t loc = gimple_location_safe (ie->call_stmt);
2743
2744 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2745 "converting indirect call in %s to direct call to %s\n",
2746 ie->caller->name (), callee->name ());
2747 }
2748 ie = cgraph_make_edge_direct (ie, callee);
2749 es = inline_edge_summary (ie);
2750 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2751 - eni_size_weights.call_cost);
2752 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2753 - eni_time_weights.call_cost);
2754
2755 return ie;
2756 }
2757
2758 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2759 return NULL if there is none.  BY_REF specifies whether the value has to
2760 be passed by reference or by value. */
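/* This is how try_make_edge_direct_simple_call further below uses it: when an
   indirect call loads its target from an aggregate, the constant stored at
   ie->indirect_info->offset is looked up with a matching by-reference
   flag.  */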
2761
2762 tree
2763 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2764 HOST_WIDE_INT offset, bool by_ref)
2765 {
2766 struct ipa_agg_jf_item *item;
2767 int i;
2768
2769 if (by_ref != agg->by_ref)
2770 return NULL;
2771
2772 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2773 if (item->offset == offset)
2774 {
2775 /* Currently we do not have clobber values, return NULL for them once
2776 we do. */
2777 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2778 return item->value;
2779 }
2780 return NULL;
2781 }
2782
2783 /* Remove a reference to SYMBOL from the list of references of a node given by
2784 reference description RDESC. Return true if the reference has been
2785 successfully found and removed. */
2786
2787 static bool
2788 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2789 {
2790 struct ipa_ref *to_del;
2791 struct cgraph_edge *origin;
2792
2793 origin = rdesc->cs;
2794 if (!origin)
2795 return false;
2796 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2797 origin->lto_stmt_uid);
2798 if (!to_del)
2799 return false;
2800
2801 to_del->remove_reference ();
2802 if (dump_file)
2803 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2804 xstrdup (origin->caller->name ()),
2805 origin->caller->order, xstrdup (symbol->name ()));
2806 return true;
2807 }
2808
2809 /* If JFUNC has a reference description with refcount different from
2810 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2811 NULL. JFUNC must be a constant jump function. */
2812
2813 static struct ipa_cst_ref_desc *
2814 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2815 {
2816 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2817 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2818 return rdesc;
2819 else
2820 return NULL;
2821 }
2822
2823 /* If the value of constant jump function JFUNC is an address of a function
2824 declaration, return the associated call graph node. Otherwise return
2825 NULL. */
2826
2827 static cgraph_node *
2828 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2829 {
2830 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2831 tree cst = ipa_get_jf_constant (jfunc);
2832 if (TREE_CODE (cst) != ADDR_EXPR
2833 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2834 return NULL;
2835
2836 return cgraph_get_node (TREE_OPERAND (cst, 0));
2837 }
2838
2839
2840 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2841 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2842 the edge specified in the rdesc. Return false if either the symbol or the
2843 reference could not be found, otherwise return true. */
2844
2845 static bool
2846 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2847 {
2848 struct ipa_cst_ref_desc *rdesc;
2849 if (jfunc->type == IPA_JF_CONST
2850 && (rdesc = jfunc_rdesc_usable (jfunc))
2851 && --rdesc->refcount == 0)
2852 {
2853 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2854 if (!symbol)
2855 return false;
2856
2857 return remove_described_reference (symbol, rdesc);
2858 }
2859 return true;
2860 }
2861
2862 /* Try to find a destination for indirect edge IE that corresponds to a simple
2863 call or a call of a member function pointer and where the destination is a
2864 pointer formal parameter described by jump function JFUNC. If it can be
2865 determined, return the newly direct edge, otherwise return NULL.
2866 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2867
2868 static struct cgraph_edge *
2869 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2870 struct ipa_jump_func *jfunc,
2871 struct ipa_node_params *new_root_info)
2872 {
2873 struct cgraph_edge *cs;
2874 tree target;
2875 bool agg_contents = ie->indirect_info->agg_contents;
2876
2877 if (ie->indirect_info->agg_contents)
2878 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2879 ie->indirect_info->offset,
2880 ie->indirect_info->by_ref);
2881 else
2882 target = ipa_value_from_jfunc (new_root_info, jfunc);
2883 if (!target)
2884 return NULL;
2885 cs = ipa_make_edge_direct_to_target (ie, target);
2886
2887 if (cs && !agg_contents)
2888 {
2889 bool ok;
2890 gcc_checking_assert (cs->callee
2891 && (cs != ie
2892 || jfunc->type != IPA_JF_CONST
2893 || !cgraph_node_for_jfunc (jfunc)
2894 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2895 ok = try_decrement_rdesc_refcount (jfunc);
2896 gcc_checking_assert (ok);
2897 }
2898
2899 return cs;
2900 }
2901
2902 /* Return the target to be used in cases of impossible devirtualization. IE
2903 and target (the latter can be NULL) are dumped when dumping is enabled. */
2904
2905 tree
2906 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2907 {
2908 if (dump_file)
2909 {
2910 if (target)
2911 fprintf (dump_file,
2912 "Type inconsistent devirtualization: %s/%i->%s\n",
2913 ie->caller->name (), ie->caller->order,
2914 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2915 else
2916 fprintf (dump_file,
2917 "No devirtualization target in %s/%i\n",
2918 ie->caller->name (), ie->caller->order);
2919 }
2920 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2921 cgraph_get_create_node (new_target);
2922 return new_target;
2923 }
2924
2925 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2926 call based on a formal parameter which is described by jump function JFUNC
2927 and if it can be determined, make it direct and return the direct edge.
2928 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2929 are relative to. */
2930
2931 static struct cgraph_edge *
2932 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2933 struct ipa_jump_func *jfunc,
2934 struct ipa_node_params *new_root_info)
2935 {
2936 tree binfo, target;
2937
2938 if (!flag_devirtualize)
2939 return NULL;
2940
2941 /* First try to do lookup via known virtual table pointer value. */
2942 if (!ie->indirect_info->by_ref)
2943 {
2944 tree vtable;
2945 unsigned HOST_WIDE_INT offset;
2946 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2947 ie->indirect_info->offset,
2948 true);
2949 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2950 {
2951 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2952 vtable, offset);
2953 if (target)
2954 {
2955 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2956 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2957 || !possible_polymorphic_call_target_p
2958 (ie, cgraph_get_node (target)))
2959 target = ipa_impossible_devirt_target (ie, target);
2960 return ipa_make_edge_direct_to_target (ie, target);
2961 }
2962 }
2963 }
2964
2965 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2966
2967 if (!binfo)
2968 return NULL;
2969
2970 if (TREE_CODE (binfo) != TREE_BINFO)
2971 {
2972 ipa_polymorphic_call_context context;
2973 vec <cgraph_node *>targets;
2974 bool final;
2975
2976 if (!get_polymorphic_call_info_from_invariant
2977 (&context, binfo, ie->indirect_info->otr_type,
2978 ie->indirect_info->offset))
2979 return NULL;
2980 targets = possible_polymorphic_call_targets
2981 (ie->indirect_info->otr_type,
2982 ie->indirect_info->otr_token,
2983 context, &final);
2984 if (!final || targets.length () > 1)
2985 return NULL;
2986 if (targets.length () == 1)
2987 target = targets[0]->decl;
2988 else
2989 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2990 }
2991 else
2992 {
2993 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2994 ie->indirect_info->otr_type);
2995 if (binfo)
2996 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2997 binfo);
2998 else
2999 return NULL;
3000 }
3001
3002 if (target)
3003 {
3004 if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
3005 target = ipa_impossible_devirt_target (ie, target);
3006 return ipa_make_edge_direct_to_target (ie, target);
3007 }
3008 else
3009 return NULL;
3010 }
3011
3012 /* Update the param called notes associated with NODE when CS is being inlined,
3013 assuming NODE is (potentially indirectly) inlined into CS->callee.
3014 Moreover, if the callee is discovered to be constant, create a new cgraph
3015 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3016 unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
3017
3018 static bool
3019 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3020 struct cgraph_node *node,
3021 vec<cgraph_edge_p> *new_edges)
3022 {
3023 struct ipa_edge_args *top;
3024 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3025 struct ipa_node_params *new_root_info;
3026 bool res = false;
3027
3028 ipa_check_create_edge_args ();
3029 top = IPA_EDGE_REF (cs);
3030 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3031 ? cs->caller->global.inlined_to
3032 : cs->caller);
3033
3034 for (ie = node->indirect_calls; ie; ie = next_ie)
3035 {
3036 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3037 struct ipa_jump_func *jfunc;
3038 int param_index;
3039
3040 next_ie = ie->next_callee;
3041
3042 if (ici->param_index == -1)
3043 continue;
3044
3045 /* We must check range due to calls with variable number of arguments: */
3046 if (ici->param_index >= ipa_get_cs_argument_count (top))
3047 {
3048 ici->param_index = -1;
3049 continue;
3050 }
3051
3052 param_index = ici->param_index;
3053 jfunc = ipa_get_ith_jump_func (top, param_index);
3054
3055 if (!flag_indirect_inlining)
3056 new_direct_edge = NULL;
3057 else if (ici->polymorphic)
3058 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3059 new_root_info);
3060 else
3061 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3062 new_root_info);
3063 /* If speculation was removed, then we need to do nothing. */
3064 if (new_direct_edge && new_direct_edge != ie)
3065 {
3066 new_direct_edge->indirect_inlining_edge = 1;
3067 top = IPA_EDGE_REF (cs);
3068 res = true;
3069 }
3070 else if (new_direct_edge)
3071 {
3072 new_direct_edge->indirect_inlining_edge = 1;
3073 if (new_direct_edge->call_stmt)
3074 new_direct_edge->call_stmt_cannot_inline_p
3075 = !gimple_check_call_matching_types (
3076 new_direct_edge->call_stmt,
3077 new_direct_edge->callee->decl, false);
3078 if (new_edges)
3079 {
3080 new_edges->safe_push (new_direct_edge);
3081 res = true;
3082 }
3083 top = IPA_EDGE_REF (cs);
3084 }
3085 else if (jfunc->type == IPA_JF_PASS_THROUGH
3086 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3087 {
3088 if ((ici->agg_contents
3089 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3090 || (ici->polymorphic
3091 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3092 ici->param_index = -1;
3093 else
3094 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3095 }
3096 else if (jfunc->type == IPA_JF_ANCESTOR)
3097 {
3098 if ((ici->agg_contents
3099 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3100 || (ici->polymorphic
3101 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3102 ici->param_index = -1;
3103 else
3104 {
3105 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3106 if (ipa_get_jf_ancestor_offset (jfunc))
3107 ici->outer_type = NULL;
3108 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3109 }
3110 }
3111 else
3112 /* Either we can find a destination for this edge now or never. */
3113 ici->param_index = -1;
3114 }
3115
3116 return res;
3117 }
3118
3119 /* Recursively traverse subtree of NODE (including NODE) made of inlined
3120 cgraph_edges when CS has been inlined and invoke
3121 update_indirect_edges_after_inlining on all nodes and
3122 update_jump_functions_after_inlining on all non-inlined edges that lead out
3123 of this subtree. Newly discovered indirect edges will be added to
3124 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3125 created.  */
3126
3127 static bool
3128 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3129 struct cgraph_node *node,
3130 vec<cgraph_edge_p> *new_edges)
3131 {
3132 struct cgraph_edge *e;
3133 bool res;
3134
3135 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3136
3137 for (e = node->callees; e; e = e->next_callee)
3138 if (!e->inline_failed)
3139 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3140 else
3141 update_jump_functions_after_inlining (cs, e);
3142 for (e = node->indirect_calls; e; e = e->next_callee)
3143 update_jump_functions_after_inlining (cs, e);
3144
3145 return res;
3146 }
3147
3148 /* Combine two controlled uses counts as done during inlining. */
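/* The -1 accounts for the call statement that disappears with the inlining:
   e.g. if the caller had two controlled uses of a constant, one of them being
   the argument of the inlined call, and the callee had three controlled uses
   of the corresponding parameter, the combined count is 2 + 3 - 1 = 4.  */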
3149
3150 static int
3151 combine_controlled_uses_counters (int c, int d)
3152 {
3153 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3154 return IPA_UNDESCRIBED_USE;
3155 else
3156 return c + d - 1;
3157 }
3158
3159 /* Propagate number of controlled users from CS->callee to the new root of the
3160 tree of inlined nodes. */
3161
3162 static void
3163 propagate_controlled_uses (struct cgraph_edge *cs)
3164 {
3165 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3166 struct cgraph_node *new_root = cs->caller->global.inlined_to
3167 ? cs->caller->global.inlined_to : cs->caller;
3168 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3169 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3170 int count, i;
3171
3172 count = MIN (ipa_get_cs_argument_count (args),
3173 ipa_get_param_count (old_root_info));
3174 for (i = 0; i < count; i++)
3175 {
3176 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3177 struct ipa_cst_ref_desc *rdesc;
3178
3179 if (jf->type == IPA_JF_PASS_THROUGH)
3180 {
3181 int src_idx, c, d;
3182 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3183 c = ipa_get_controlled_uses (new_root_info, src_idx);
3184 d = ipa_get_controlled_uses (old_root_info, i);
3185
3186 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3187 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3188 c = combine_controlled_uses_counters (c, d);
3189 ipa_set_controlled_uses (new_root_info, src_idx, c);
3190 if (c == 0 && new_root_info->ipcp_orig_node)
3191 {
3192 struct cgraph_node *n;
3193 struct ipa_ref *ref;
3194 tree t = new_root_info->known_vals[src_idx];
3195
3196 if (t && TREE_CODE (t) == ADDR_EXPR
3197 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3198 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
3199 && (ref = new_root->find_reference (n, NULL, 0)))
3200 {
3201 if (dump_file)
3202 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3203 "reference from %s/%i to %s/%i.\n",
3204 xstrdup (new_root->name ()),
3205 new_root->order,
3206 xstrdup (n->name ()), n->order);
3207 ref->remove_reference ();
3208 }
3209 }
3210 }
3211 else if (jf->type == IPA_JF_CONST
3212 && (rdesc = jfunc_rdesc_usable (jf)))
3213 {
3214 int d = ipa_get_controlled_uses (old_root_info, i);
3215 int c = rdesc->refcount;
3216 rdesc->refcount = combine_controlled_uses_counters (c, d);
3217 if (rdesc->refcount == 0)
3218 {
3219 tree cst = ipa_get_jf_constant (jf);
3220 struct cgraph_node *n;
3221 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3222 && TREE_CODE (TREE_OPERAND (cst, 0))
3223 == FUNCTION_DECL);
3224 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3225 if (n)
3226 {
3227 struct cgraph_node *clone;
3228 bool ok;
3229 ok = remove_described_reference (n, rdesc);
3230 gcc_checking_assert (ok);
3231
3232 clone = cs->caller;
3233 while (clone->global.inlined_to
3234 && clone != rdesc->cs->caller
3235 && IPA_NODE_REF (clone)->ipcp_orig_node)
3236 {
3237 struct ipa_ref *ref;
3238 ref = clone->find_reference (n, NULL, 0);
3239 if (ref)
3240 {
3241 if (dump_file)
3242 fprintf (dump_file, "ipa-prop: Removing "
3243 "cloning-created reference "
3244 "from %s/%i to %s/%i.\n",
3245 xstrdup (clone->name ()),
3246 clone->order,
3247 xstrdup (n->name ()),
3248 n->order);
3249 ref->remove_reference ();
3250 }
3251 clone = clone->callers->caller;
3252 }
3253 }
3254 }
3255 }
3256 }
3257
3258 for (i = ipa_get_param_count (old_root_info);
3259 i < ipa_get_cs_argument_count (args);
3260 i++)
3261 {
3262 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3263
3264 if (jf->type == IPA_JF_CONST)
3265 {
3266 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3267 if (rdesc)
3268 rdesc->refcount = IPA_UNDESCRIBED_USE;
3269 }
3270 else if (jf->type == IPA_JF_PASS_THROUGH)
3271 ipa_set_controlled_uses (new_root_info,
3272 jf->value.pass_through.formal_id,
3273 IPA_UNDESCRIBED_USE);
3274 }
3275 }
3276
3277 /* Update jump functions and call note functions on inlining the call site CS.
3278 CS is expected to lead to a node already cloned by
3279 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3280 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3281 created.  */
3282
3283 bool
3284 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3285 vec<cgraph_edge_p> *new_edges)
3286 {
3287 bool changed;
3288 /* Do nothing if the preparation phase has not been carried out yet
3289 (i.e. during early inlining). */
3290 if (!ipa_node_params_vector.exists ())
3291 return false;
3292 gcc_assert (ipa_edge_args_vector);
3293
3294 propagate_controlled_uses (cs);
3295 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3296
3297 return changed;
3298 }
3299
3300 /* Frees all dynamically allocated structures that the argument info points
3301 to. */
3302
3303 void
3304 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3305 {
3306 vec_free (args->jump_functions);
3307 memset (args, 0, sizeof (*args));
3308 }
3309
3310 /* Free all ipa_edge structures. */
3311
3312 void
3313 ipa_free_all_edge_args (void)
3314 {
3315 int i;
3316 struct ipa_edge_args *args;
3317
3318 if (!ipa_edge_args_vector)
3319 return;
3320
3321 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3322 ipa_free_edge_args_substructures (args);
3323
3324 vec_free (ipa_edge_args_vector);
3325 }
3326
3327 /* Frees all dynamically allocated structures that the param info points
3328 to. */
3329
3330 void
3331 ipa_free_node_params_substructures (struct ipa_node_params *info)
3332 {
3333 info->descriptors.release ();
3334 free (info->lattices);
3335 /* Lattice values and their sources are deallocated with their allocation
3336 pool. */
3337 info->known_vals.release ();
3338 memset (info, 0, sizeof (*info));
3339 }
3340
3341 /* Free all ipa_node_params structures. */
3342
3343 void
3344 ipa_free_all_node_params (void)
3345 {
3346 int i;
3347 struct ipa_node_params *info;
3348
3349 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3350 ipa_free_node_params_substructures (info);
3351
3352 ipa_node_params_vector.release ();
3353 }
3354
3355 /* Set the aggregate replacements of NODE to be AGGVALS. */
3356
3357 void
3358 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3359 struct ipa_agg_replacement_value *aggvals)
3360 {
3361 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3362 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3363
3364 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3365 }
3366
3367 /* Hook that is called by cgraph.c when an edge is removed. */
3368
3369 static void
3370 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3371 {
3372 struct ipa_edge_args *args;
3373
3374 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3375 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3376 return;
3377
3378 args = IPA_EDGE_REF (cs);
3379 if (args->jump_functions)
3380 {
3381 struct ipa_jump_func *jf;
3382 int i;
3383 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3384 {
3385 struct ipa_cst_ref_desc *rdesc;
3386 try_decrement_rdesc_refcount (jf);
3387 if (jf->type == IPA_JF_CONST
3388 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3389 && rdesc->cs == cs)
3390 rdesc->cs = NULL;
3391 }
3392 }
3393
3394 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3395 }
3396
3397 /* Hook that is called by cgraph.c when a node is removed. */
3398
3399 static void
3400 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3401 {
3402 /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
3403 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3404 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3405 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3406 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3407 }
3408
3409 /* Hook that is called by cgraph.c when an edge is duplicated. */
3410
3411 static void
3412 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3413 __attribute__((unused)) void *data)
3414 {
3415 struct ipa_edge_args *old_args, *new_args;
3416 unsigned int i;
3417
3418 ipa_check_create_edge_args ();
3419
3420 old_args = IPA_EDGE_REF (src);
3421 new_args = IPA_EDGE_REF (dst);
3422
3423 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3424
3425 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3426 {
3427 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3428 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3429
3430 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3431
3432 if (src_jf->type == IPA_JF_CONST)
3433 {
3434 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3435
3436 if (!src_rdesc)
3437 dst_jf->value.constant.rdesc = NULL;
3438 else if (src->caller == dst->caller)
3439 {
3440 struct ipa_ref *ref;
3441 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3442 gcc_checking_assert (n);
3443 ref = src->caller->find_reference (n, src->call_stmt,
3444 src->lto_stmt_uid);
3445 gcc_checking_assert (ref);
3446 dst->caller->clone_reference (ref, ref->stmt);
3447
3448 gcc_checking_assert (ipa_refdesc_pool);
3449 struct ipa_cst_ref_desc *dst_rdesc
3450 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3451 dst_rdesc->cs = dst;
3452 dst_rdesc->refcount = src_rdesc->refcount;
3453 dst_rdesc->next_duplicate = NULL;
3454 dst_jf->value.constant.rdesc = dst_rdesc;
3455 }
3456 else if (src_rdesc->cs == src)
3457 {
3458 struct ipa_cst_ref_desc *dst_rdesc;
3459 gcc_checking_assert (ipa_refdesc_pool);
3460 dst_rdesc
3461 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3462 dst_rdesc->cs = dst;
3463 dst_rdesc->refcount = src_rdesc->refcount;
3464 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3465 src_rdesc->next_duplicate = dst_rdesc;
3466 dst_jf->value.constant.rdesc = dst_rdesc;
3467 }
3468 else
3469 {
3470 struct ipa_cst_ref_desc *dst_rdesc;
3471 /* This can happen during inlining, when a JFUNC can refer to a
3472 reference taken in a function up in the tree of inline clones.
3473 We need to find the duplicate that refers to our tree of
3474 inline clones. */
3475
3476 gcc_assert (dst->caller->global.inlined_to);
3477 for (dst_rdesc = src_rdesc->next_duplicate;
3478 dst_rdesc;
3479 dst_rdesc = dst_rdesc->next_duplicate)
3480 {
3481 struct cgraph_node *top;
3482 top = dst_rdesc->cs->caller->global.inlined_to
3483 ? dst_rdesc->cs->caller->global.inlined_to
3484 : dst_rdesc->cs->caller;
3485 if (dst->caller->global.inlined_to == top)
3486 break;
3487 }
3488 gcc_assert (dst_rdesc);
3489 dst_jf->value.constant.rdesc = dst_rdesc;
3490 }
3491 }
3492 }
3493 }
3494
3495 /* Hook that is called by cgraph.c when a node is duplicated. */
3496
3497 static void
3498 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3499 ATTRIBUTE_UNUSED void *data)
3500 {
3501 struct ipa_node_params *old_info, *new_info;
3502 struct ipa_agg_replacement_value *old_av, *new_av;
3503
3504 ipa_check_create_node_params ();
3505 old_info = IPA_NODE_REF (src);
3506 new_info = IPA_NODE_REF (dst);
3507
3508 new_info->descriptors = old_info->descriptors.copy ();
3509 new_info->lattices = NULL;
3510 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3511
3512 new_info->analysis_done = old_info->analysis_done;
3513 new_info->node_enqueued = old_info->node_enqueued;
3514
3515 old_av = ipa_get_agg_replacements_for_node (src);
3516 if (!old_av)
3517 return;
3518
3519 new_av = NULL;
3520 while (old_av)
3521 {
3522 struct ipa_agg_replacement_value *v;
3523
3524 v = ggc_alloc<ipa_agg_replacement_value> ();
3525 memcpy (v, old_av, sizeof (*v));
3526 v->next = new_av;
3527 new_av = v;
3528 old_av = old_av->next;
3529 }
3530 ipa_set_node_agg_value_chain (dst, new_av);
3531 }
3532
3533
3534 /* Analyze newly added function into callgraph. */
3535
3536 static void
3537 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3538 {
3539 if (cgraph_function_with_gimple_body_p (node))
3540 ipa_analyze_node (node);
3541 }
3542
3543 /* Register our cgraph hooks if they are not already there. */
3544
3545 void
3546 ipa_register_cgraph_hooks (void)
3547 {
3548 if (!edge_removal_hook_holder)
3549 edge_removal_hook_holder =
3550 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3551 if (!node_removal_hook_holder)
3552 node_removal_hook_holder =
3553 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3554 if (!edge_duplication_hook_holder)
3555 edge_duplication_hook_holder =
3556 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3557 if (!node_duplication_hook_holder)
3558 node_duplication_hook_holder =
3559 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3560 function_insertion_hook_holder =
3561 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3562 }
3563
3564 /* Unregister our cgraph hooks.  */
3565
3566 static void
3567 ipa_unregister_cgraph_hooks (void)
3568 {
3569 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3570 edge_removal_hook_holder = NULL;
3571 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3572 node_removal_hook_holder = NULL;
3573 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3574 edge_duplication_hook_holder = NULL;
3575 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3576 node_duplication_hook_holder = NULL;
3577 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3578 function_insertion_hook_holder = NULL;
3579 }
3580
3581 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3582 longer needed after ipa-cp. */
3583
3584 void
3585 ipa_free_all_structures_after_ipa_cp (void)
3586 {
3587 if (!optimize)
3588 {
3589 ipa_free_all_edge_args ();
3590 ipa_free_all_node_params ();
3591 free_alloc_pool (ipcp_sources_pool);
3592 free_alloc_pool (ipcp_values_pool);
3593 free_alloc_pool (ipcp_agg_lattice_pool);
3594 ipa_unregister_cgraph_hooks ();
3595 if (ipa_refdesc_pool)
3596 free_alloc_pool (ipa_refdesc_pool);
3597 }
3598 }
3599
3600 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3601 longer needed after indirect inlining. */
3602
3603 void
3604 ipa_free_all_structures_after_iinln (void)
3605 {
3606 ipa_free_all_edge_args ();
3607 ipa_free_all_node_params ();
3608 ipa_unregister_cgraph_hooks ();
3609 if (ipcp_sources_pool)
3610 free_alloc_pool (ipcp_sources_pool);
3611 if (ipcp_values_pool)
3612 free_alloc_pool (ipcp_values_pool);
3613 if (ipcp_agg_lattice_pool)
3614 free_alloc_pool (ipcp_agg_lattice_pool);
3615 if (ipa_refdesc_pool)
3616 free_alloc_pool (ipa_refdesc_pool);
3617 }
3618
3619 /* Print ipa_tree_map data structures of function NODE to F.  */
3621
3622 void
3623 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3624 {
3625 int i, count;
3626 struct ipa_node_params *info;
3627
3628 if (!node->definition)
3629 return;
3630 info = IPA_NODE_REF (node);
3631 fprintf (f, " function %s/%i parameter descriptors:\n",
3632 node->name (), node->order);
3633 count = ipa_get_param_count (info);
3634 for (i = 0; i < count; i++)
3635 {
3636 int c;
3637
3638 fprintf (f, " ");
3639 ipa_dump_param (f, info, i);
3640 if (ipa_is_param_used (info, i))
3641 fprintf (f, " used");
3642 c = ipa_get_controlled_uses (info, i);
3643 if (c == IPA_UNDESCRIBED_USE)
3644 fprintf (f, " undescribed_use");
3645 else
3646 fprintf (f, " controlled_uses=%i", c);
3647 fprintf (f, "\n");
3648 }
3649 }
3650
3651 /* Print ipa_tree_map data structures of all functions in the
3652 callgraph to F. */
3653
3654 void
3655 ipa_print_all_params (FILE * f)
3656 {
3657 struct cgraph_node *node;
3658
3659 fprintf (f, "\nFunction parameters:\n");
3660 FOR_EACH_FUNCTION (node)
3661 ipa_print_node_params (f, node);
3662 }
3663
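/* A minimal debugging sketch: the helper name debug_all_ipa_params is
   hypothetical and only illustrates how the dump routines above can be
   driven, e.g. by hand from a debugger.  */

DEBUG_FUNCTION void
debug_all_ipa_params (void)
{
  ipa_print_all_params (stderr);
}
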
3664 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
3665
3666 vec<tree>
3667 ipa_get_vector_of_formal_parms (tree fndecl)
3668 {
3669 vec<tree> args;
3670 int count;
3671 tree parm;
3672
3673 gcc_assert (!flag_wpa);
3674 count = count_formal_params (fndecl);
3675 args.create (count);
3676 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3677 args.quick_push (parm);
3678
3679 return args;
3680 }
3681
3682 /* Return a heap-allocated vector containing types of formal parameters of
3683 function type FNTYPE. */
3684
3685 vec<tree>
3686 ipa_get_vector_of_formal_parm_types (tree fntype)
3687 {
3688 vec<tree> types;
3689 int count = 0;
3690 tree t;
3691
3692 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3693 count++;
3694
3695 types.create (count);
3696 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3697 types.quick_push (TREE_VALUE (t));
3698
3699 return types;
3700 }
3701
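/* A minimal usage sketch; the helper below is hypothetical.  Both getters
   above return heap-allocated vectors, so callers must release them, as
   ipa_modify_formal_parameters does at its end.  */

static int
count_parms_and_parm_types (tree fndecl)
{
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
  vec<tree> types = ipa_get_vector_of_formal_parm_types (TREE_TYPE (fndecl));
  int result = (int) (parms.length () + types.length ());
  parms.release ();
  types.release ();
  return result;
}
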
3702 /* Modify the function declaration FNDECL and its type according to the plan in
3703 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3704 to reflect the actual parameters being modified which are determined by the
3705 base_index field. */
3706
3707 void
3708 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3709 {
3710 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3711 tree orig_type = TREE_TYPE (fndecl);
3712 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3713
3714 /* The following test is an ugly hack: some functions simply don't have any
3715 arguments in their type. This is probably a bug, but well... */
3716 bool care_for_types = (old_arg_types != NULL_TREE);
3717 bool last_parm_void;
3718 vec<tree> otypes;
3719 if (care_for_types)
3720 {
3721 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3722 == void_type_node);
3723 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3724 if (last_parm_void)
3725 gcc_assert (oparms.length () + 1 == otypes.length ());
3726 else
3727 gcc_assert (oparms.length () == otypes.length ());
3728 }
3729 else
3730 {
3731 last_parm_void = false;
3732 otypes.create (0);
3733 }
3734
3735 int len = adjustments.length ();
3736 tree *link = &DECL_ARGUMENTS (fndecl);
3737 tree new_arg_types = NULL;
3738 for (int i = 0; i < len; i++)
3739 {
3740 struct ipa_parm_adjustment *adj;
3741 gcc_assert (link);
3742
3743 adj = &adjustments[i];
3744 tree parm;
3745 if (adj->op == IPA_PARM_OP_NEW)
3746 parm = NULL;
3747 else
3748 parm = oparms[adj->base_index];
3749 adj->base = parm;
3750
3751 if (adj->op == IPA_PARM_OP_COPY)
3752 {
3753 if (care_for_types)
3754 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3755 new_arg_types);
3756 *link = parm;
3757 link = &DECL_CHAIN (parm);
3758 }
3759 else if (adj->op != IPA_PARM_OP_REMOVE)
3760 {
3761 tree new_parm;
3762 tree ptype;
3763
3764 if (adj->by_ref)
3765 ptype = build_pointer_type (adj->type);
3766 else
3767 {
3768 ptype = adj->type;
3769 if (is_gimple_reg_type (ptype))
3770 {
3771 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3772 if (TYPE_ALIGN (ptype) < malign)
3773 ptype = build_aligned_type (ptype, malign);
3774 }
3775 }
3776
3777 if (care_for_types)
3778 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3779
3780 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3781 ptype);
3782 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3783 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3784 DECL_ARTIFICIAL (new_parm) = 1;
3785 DECL_ARG_TYPE (new_parm) = ptype;
3786 DECL_CONTEXT (new_parm) = fndecl;
3787 TREE_USED (new_parm) = 1;
3788 DECL_IGNORED_P (new_parm) = 1;
3789 layout_decl (new_parm, 0);
3790
3791 if (adj->op == IPA_PARM_OP_NEW)
3792 adj->base = NULL;
3793 else
3794 adj->base = parm;
3795 adj->new_decl = new_parm;
3796
3797 *link = new_parm;
3798 link = &DECL_CHAIN (new_parm);
3799 }
3800 }
3801
3802 *link = NULL_TREE;
3803
3804 tree new_reversed = NULL;
3805 if (care_for_types)
3806 {
3807 new_reversed = nreverse (new_arg_types);
3808 if (last_parm_void)
3809 {
3810 if (new_reversed)
3811 TREE_CHAIN (new_arg_types) = void_list_node;
3812 else
3813 new_reversed = void_list_node;
3814 }
3815 }
3816
3817 /* Use copy_node to preserve as much as possible from the original type
3818 (debug info, attribute lists etc.).
3819 The exception is METHOD_TYPEs, which must have a THIS argument; when
3820 we are asked to remove it, we need to build a new FUNCTION_TYPE
3821 instead. */
3822 tree new_type = NULL;
3823 if (TREE_CODE (orig_type) != METHOD_TYPE
3824 || (adjustments[0].op == IPA_PARM_OP_COPY
3825 && adjustments[0].base_index == 0))
3826 {
3827 new_type = build_distinct_type_copy (orig_type);
3828 TYPE_ARG_TYPES (new_type) = new_reversed;
3829 }
3830 else
3831 {
3832 new_type
3833 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3834 new_reversed));
3835 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3836 DECL_VINDEX (fndecl) = NULL_TREE;
3837 }
3838
3839 /* When signature changes, we need to clear builtin info. */
3840 if (DECL_BUILT_IN (fndecl))
3841 {
3842 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3843 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3844 }
3845
3846 /* This is a new type, not a copy of an old type. Need to reassociate
3847 variants. We can handle everything except the main variant lazily. */
3848 tree t = TYPE_MAIN_VARIANT (orig_type);
3849 if (orig_type != t)
3850 {
3851 TYPE_MAIN_VARIANT (new_type) = t;
3852 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3853 TYPE_NEXT_VARIANT (t) = new_type;
3854 }
3855 else
3856 {
3857 TYPE_MAIN_VARIANT (new_type) = new_type;
3858 TYPE_NEXT_VARIANT (new_type) = NULL;
3859 }
3860
3861 TREE_TYPE (fndecl) = new_type;
3862 DECL_VIRTUAL_P (fndecl) = 0;
3863 DECL_LANG_SPECIFIC (fndecl) = NULL;
3864 otypes.release ();
3865 oparms.release ();
3866 }
3867
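/* A minimal sketch of how callers drive the function above; the helper is
   hypothetical, and real users such as IPA-SRA compute the adjustments from
   analysis results instead.  It keeps the first formal parameter of FNDECL
   and removes the second one.  */

static void
drop_second_parameter (tree fndecl)
{
  ipa_parm_adjustment_vec adjustments;
  struct ipa_parm_adjustment adj;

  adjustments.create (2);

  memset (&adj, 0, sizeof (adj));
  adj.op = IPA_PARM_OP_COPY;
  adj.base_index = 0;
  adjustments.quick_push (adj);

  memset (&adj, 0, sizeof (adj));
  adj.op = IPA_PARM_OP_REMOVE;
  adj.base_index = 1;
  adjustments.quick_push (adj);

  ipa_modify_formal_parameters (fndecl, adjustments);
  adjustments.release ();
}
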
3868 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3869 If this is a directly recursive call, CS must be NULL. Otherwise it must
3870 contain the corresponding call graph edge. */
3871
3872 void
3873 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3874 ipa_parm_adjustment_vec adjustments)
3875 {
3876 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3877 vec<tree> vargs;
3878 vec<tree, va_gc> **debug_args = NULL;
3879 gimple new_stmt;
3880 gimple_stmt_iterator gsi, prev_gsi;
3881 tree callee_decl;
3882 int i, len;
3883
3884 len = adjustments.length ();
3885 vargs.create (len);
3886 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3887 current_node->remove_stmt_references (stmt);
3888
3889 gsi = gsi_for_stmt (stmt);
3890 prev_gsi = gsi;
3891 gsi_prev (&prev_gsi);
3892 for (i = 0; i < len; i++)
3893 {
3894 struct ipa_parm_adjustment *adj;
3895
3896 adj = &adjustments[i];
3897
3898 if (adj->op == IPA_PARM_OP_COPY)
3899 {
3900 tree arg = gimple_call_arg (stmt, adj->base_index);
3901
3902 vargs.quick_push (arg);
3903 }
3904 else if (adj->op != IPA_PARM_OP_REMOVE)
3905 {
3906 tree expr, base, off;
3907 location_t loc;
3908 unsigned int deref_align = 0;
3909 bool deref_base = false;
3910
3911 /* Since we create a new parameter out of the value of the old one, we can
3912 do the following kinds of transformations:
3913
3914 - A scalar passed by reference is converted to a scalar passed by
3915 value. (adj->by_ref is false and the type of the original
3916 actual argument is a pointer to a scalar).
3917
3918 - A part of an aggregate is passed instead of the whole aggregate.
3919 The part can be passed either by value or by reference, this is
3920 determined by value of adj->by_ref. Moreover, the code below
3921 handles both situations when the original aggregate is passed by
3922 value (its type is not a pointer) and when it is passed by
3923 reference (it is a pointer to an aggregate).
3924
3925 When the new argument is passed by reference (adj->by_ref is true)
3926 it must be a part of an aggregate and therefore we form it by
3927 simply taking the address of a reference inside the original
3928 aggregate. */
3929
3930 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3931 base = gimple_call_arg (stmt, adj->base_index);
3932 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3933 : EXPR_LOCATION (base);
3934
3935 if (TREE_CODE (base) != ADDR_EXPR
3936 && POINTER_TYPE_P (TREE_TYPE (base)))
3937 off = build_int_cst (adj->alias_ptr_type,
3938 adj->offset / BITS_PER_UNIT);
3939 else
3940 {
3941 HOST_WIDE_INT base_offset;
3942 tree prev_base;
3943 bool addrof;
3944
3945 if (TREE_CODE (base) == ADDR_EXPR)
3946 {
3947 base = TREE_OPERAND (base, 0);
3948 addrof = true;
3949 }
3950 else
3951 addrof = false;
3952 prev_base = base;
3953 base = get_addr_base_and_unit_offset (base, &base_offset);
3954 /* Aggregate arguments can have non-invariant addresses. */
3955 if (!base)
3956 {
3957 base = build_fold_addr_expr (prev_base);
3958 off = build_int_cst (adj->alias_ptr_type,
3959 adj->offset / BITS_PER_UNIT);
3960 }
3961 else if (TREE_CODE (base) == MEM_REF)
3962 {
3963 if (!addrof)
3964 {
3965 deref_base = true;
3966 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3967 }
3968 off = build_int_cst (adj->alias_ptr_type,
3969 base_offset
3970 + adj->offset / BITS_PER_UNIT);
3971 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3972 off);
3973 base = TREE_OPERAND (base, 0);
3974 }
3975 else
3976 {
3977 off = build_int_cst (adj->alias_ptr_type,
3978 base_offset
3979 + adj->offset / BITS_PER_UNIT);
3980 base = build_fold_addr_expr (base);
3981 }
3982 }
3983
3984 if (!adj->by_ref)
3985 {
3986 tree type = adj->type;
3987 unsigned int align;
3988 unsigned HOST_WIDE_INT misalign;
3989
3990 if (deref_base)
3991 {
3992 align = deref_align;
3993 misalign = 0;
3994 }
3995 else
3996 {
3997 get_pointer_alignment_1 (base, &align, &misalign);
3998 if (TYPE_ALIGN (type) > align)
3999 align = TYPE_ALIGN (type);
4000 }
4001 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4002 * BITS_PER_UNIT);
4003 misalign = misalign & (align - 1);
4004 if (misalign != 0)
4005 align = (misalign & -misalign);
4006 if (align < TYPE_ALIGN (type))
4007 type = build_aligned_type (type, align);
4008 base = force_gimple_operand_gsi (&gsi, base,
4009 true, NULL, true, GSI_SAME_STMT);
4010 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4011 /* If expr is not a valid gimple call argument, emit
4012 a load into a temporary. */
4013 if (is_gimple_reg_type (TREE_TYPE (expr)))
4014 {
4015 gimple tem = gimple_build_assign (NULL_TREE, expr);
4016 if (gimple_in_ssa_p (cfun))
4017 {
4018 gimple_set_vuse (tem, gimple_vuse (stmt));
4019 expr = make_ssa_name (TREE_TYPE (expr), tem);
4020 }
4021 else
4022 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4023 gimple_assign_set_lhs (tem, expr);
4024 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4025 }
4026 }
4027 else
4028 {
4029 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4030 expr = build_fold_addr_expr (expr);
4031 expr = force_gimple_operand_gsi (&gsi, expr,
4032 true, NULL, true, GSI_SAME_STMT);
4033 }
4034 vargs.quick_push (expr);
4035 }
4036 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4037 {
4038 unsigned int ix;
4039 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4040 gimple def_temp;
4041
4042 arg = gimple_call_arg (stmt, adj->base_index);
4043 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4044 {
4045 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4046 continue;
4047 arg = fold_convert_loc (gimple_location (stmt),
4048 TREE_TYPE (origin), arg);
4049 }
4050 if (debug_args == NULL)
4051 debug_args = decl_debug_args_insert (callee_decl);
4052 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4053 if (ddecl == origin)
4054 {
4055 ddecl = (**debug_args)[ix + 1];
4056 break;
4057 }
4058 if (ddecl == NULL)
4059 {
4060 ddecl = make_node (DEBUG_EXPR_DECL);
4061 DECL_ARTIFICIAL (ddecl) = 1;
4062 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4063 DECL_MODE (ddecl) = DECL_MODE (origin);
4064
4065 vec_safe_push (*debug_args, origin);
4066 vec_safe_push (*debug_args, ddecl);
4067 }
4068 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4069 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4070 }
4071 }
4072
4073 if (dump_file && (dump_flags & TDF_DETAILS))
4074 {
4075 fprintf (dump_file, "replacing stmt:");
4076 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4077 }
4078
4079 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4080 vargs.release ();
4081 if (gimple_call_lhs (stmt))
4082 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4083
4084 gimple_set_block (new_stmt, gimple_block (stmt));
4085 if (gimple_has_location (stmt))
4086 gimple_set_location (new_stmt, gimple_location (stmt));
4087 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4088 gimple_call_copy_flags (new_stmt, stmt);
4089 if (gimple_in_ssa_p (cfun))
4090 {
4091 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4092 if (gimple_vdef (stmt))
4093 {
4094 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4095 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4096 }
4097 }
4098
4099 if (dump_file && (dump_flags & TDF_DETAILS))
4100 {
4101 fprintf (dump_file, "with stmt:");
4102 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4103 fprintf (dump_file, "\n");
4104 }
4105 gsi_replace (&gsi, new_stmt, true);
4106 if (cs)
4107 cgraph_set_call_stmt (cs, new_stmt);
4108 do
4109 {
4110 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
4111 gsi_prev (&gsi);
4112 }
4113 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4114 }
4115
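/* For illustration, assuming the hypothetical adjustments from the sketch
   after ipa_modify_formal_parameters above (copy parameter 0, remove
   parameter 1): a call "foo (x, y)" is rebuilt as "foo (x)", and when debug
   info is enabled a debug bind is emitted so that the value of the removed
   argument remains available to the debugger.  */
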
4116 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4117 so. ADJUSTMENTS is a vector of adjustments. CONVERT
4118 specifies whether the function should care about type incompatibility between
4119 the current and new expressions. If it is false, the function will leave
4120 incompatibility issues to the caller. Return true iff the expression
4121 was modified. */
4122
4123 bool
4124 ipa_modify_expr (tree *expr, bool convert,
4125 ipa_parm_adjustment_vec adjustments)
4126 {
4127 struct ipa_parm_adjustment *cand
4128 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4129 if (!cand)
4130 return false;
4131
4132 tree src;
4133 if (cand->by_ref)
4134 src = build_simple_mem_ref (cand->new_decl);
4135 else
4136 src = cand->new_decl;
4137
4138 if (dump_file && (dump_flags & TDF_DETAILS))
4139 {
4140 fprintf (dump_file, "About to replace expr ");
4141 print_generic_expr (dump_file, *expr, 0);
4142 fprintf (dump_file, " with ");
4143 print_generic_expr (dump_file, src, 0);
4144 fprintf (dump_file, "\n");
4145 }
4146
4147 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4148 {
4149 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4150 *expr = vce;
4151 }
4152 else
4153 *expr = src;
4154 return true;
4155 }
4156
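/* For example, if a hypothetical adjustment turns an aggregate parameter
   into a scalar passed by reference (by_ref set), an occurrence of the old
   aggregate reference in the body becomes a load through new_decl, wrapped
   in a VIEW_CONVERT_EXPR when CONVERT is set and the types do not match.  */
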
4157 /* If T is an SSA_NAME, return NULL if it is not a default def or
4158 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4159 the base variable is always returned, regardless of whether it is a default
4160 def. Return T if it is not an SSA_NAME. */
4161
4162 static tree
4163 get_ssa_base_param (tree t, bool ignore_default_def)
4164 {
4165 if (TREE_CODE (t) == SSA_NAME)
4166 {
4167 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4168 return SSA_NAME_VAR (t);
4169 else
4170 return NULL_TREE;
4171 }
4172 return t;
4173 }
4174
4175 /* Given an expression, return an adjustment entry specifying the
4176 transformation to be done on EXPR. If no suitable adjustment entry
4177 is found, return NULL.
4178
4179 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4180 default def, otherwise bail on them.
4181
4182 If CONVERT is non-NULL, this function will set *CONVERT if the
4183 expression provided is a component reference. ADJUSTMENTS is the
4184 adjustments vector. */
4185
4186 ipa_parm_adjustment *
4187 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4188 ipa_parm_adjustment_vec adjustments,
4189 bool ignore_default_def)
4190 {
4191 if (TREE_CODE (**expr) == BIT_FIELD_REF
4192 || TREE_CODE (**expr) == IMAGPART_EXPR
4193 || TREE_CODE (**expr) == REALPART_EXPR)
4194 {
4195 *expr = &TREE_OPERAND (**expr, 0);
4196 if (convert)
4197 *convert = true;
4198 }
4199
4200 HOST_WIDE_INT offset, size, max_size;
4201 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4202 if (!base || size == -1 || max_size == -1)
4203 return NULL;
4204
4205 if (TREE_CODE (base) == MEM_REF)
4206 {
4207 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4208 base = TREE_OPERAND (base, 0);
4209 }
4210
4211 base = get_ssa_base_param (base, ignore_default_def);
4212 if (!base || TREE_CODE (base) != PARM_DECL)
4213 return NULL;
4214
4215 struct ipa_parm_adjustment *cand = NULL;
4216 unsigned int len = adjustments.length ();
4217 for (unsigned i = 0; i < len; i++)
4218 {
4219 struct ipa_parm_adjustment *adj = &adjustments[i];
4220
4221 if (adj->base == base
4222 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4223 {
4224 cand = adj;
4225 break;
4226 }
4227 }
4228
4229 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4230 return NULL;
4231 return cand;
4232 }
4233
4234 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4235
4236 static bool
4237 index_in_adjustments_multiple_times_p (int base_index,
4238 ipa_parm_adjustment_vec adjustments)
4239 {
4240 int i, len = adjustments.length ();
4241 bool one = false;
4242
4243 for (i = 0; i < len; i++)
4244 {
4245 struct ipa_parm_adjustment *adj;
4246 adj = &adjustments[i];
4247
4248 if (adj->base_index == base_index)
4249 {
4250 if (one)
4251 return true;
4252 else
4253 one = true;
4254 }
4255 }
4256 return false;
4257 }
4258
4260 /* Return adjustments that should have the same effect on function parameters
4261 and call arguments as if they were first changed according to adjustments in
4262 INNER and then by adjustments in OUTER. */
4263
4264 ipa_parm_adjustment_vec
4265 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4266 ipa_parm_adjustment_vec outer)
4267 {
4268 int i, outlen = outer.length ();
4269 int inlen = inner.length ();
4270 int removals = 0;
4271 ipa_parm_adjustment_vec adjustments, tmp;
4272
4273 tmp.create (inlen);
4274 for (i = 0; i < inlen; i++)
4275 {
4276 struct ipa_parm_adjustment *n;
4277 n = &inner[i];
4278
4279 if (n->op == IPA_PARM_OP_REMOVE)
4280 removals++;
4281 else
4282 {
4283 /* FIXME: Handling of new arguments is not implemented yet. */
4284 gcc_assert (n->op != IPA_PARM_OP_NEW);
4285 tmp.quick_push (*n);
4286 }
4287 }
4288
4289 adjustments.create (outlen + removals);
4290 for (i = 0; i < outlen; i++)
4291 {
4292 struct ipa_parm_adjustment r;
4293 struct ipa_parm_adjustment *out = &outer[i];
4294 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4295
4296 memset (&r, 0, sizeof (r));
4297 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4298 if (out->op == IPA_PARM_OP_REMOVE)
4299 {
4300 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4301 {
4302 r.op = IPA_PARM_OP_REMOVE;
4303 adjustments.quick_push (r);
4304 }
4305 continue;
4306 }
4307 else
4308 {
4309 /* FIXME: Handling of new arguments is not implemented yet. */
4310 gcc_assert (out->op != IPA_PARM_OP_NEW);
4311 }
4312
4313 r.base_index = in->base_index;
4314 r.type = out->type;
4315
4316 /* FIXME: Create nonlocal value too. */
4317
4318 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4319 r.op = IPA_PARM_OP_COPY;
4320 else if (in->op == IPA_PARM_OP_COPY)
4321 r.offset = out->offset;
4322 else if (out->op == IPA_PARM_OP_COPY)
4323 r.offset = in->offset;
4324 else
4325 r.offset = in->offset + out->offset;
4326 adjustments.quick_push (r);
4327 }
4328
4329 for (i = 0; i < inlen; i++)
4330 {
4331 struct ipa_parm_adjustment *n = &inner[i];
4332
4333 if (n->op == IPA_PARM_OP_REMOVE)
4334 adjustments.quick_push (*n);
4335 }
4336
4337 tmp.release ();
4338 return adjustments;
4339 }
4340
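/* A worked example with a hypothetical signature: if the original
   parameters are (a, b, c) and INNER removes b, the intermediate signature
   is (a, c), in which index 0 is a and index 1 is c.  Base indices in OUTER
   refer to this intermediate signature, whereas the returned vector is
   expressed in terms of the original one.  */
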
4341 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a
4342 human-friendly way, assuming they are meant to be applied to FNDECL. */
4343
4344 void
4345 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4346 tree fndecl)
4347 {
4348 int i, len = adjustments.length ();
4349 bool first = true;
4350 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4351
4352 fprintf (file, "IPA param adjustments: ");
4353 for (i = 0; i < len; i++)
4354 {
4355 struct ipa_parm_adjustment *adj;
4356 adj = &adjustments[i];
4357
4358 if (!first)
4359 fprintf (file, " ");
4360 else
4361 first = false;
4362
4363 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4364 print_generic_expr (file, parms[adj->base_index], 0);
4365 if (adj->base)
4366 {
4367 fprintf (file, ", base: ");
4368 print_generic_expr (file, adj->base, 0);
4369 }
4370 if (adj->new_decl)
4371 {
4372 fprintf (file, ", new_decl: ");
4373 print_generic_expr (file, adj->new_decl, 0);
4374 }
4375 if (adj->new_ssa_base)
4376 {
4377 fprintf (file, ", new_ssa_base: ");
4378 print_generic_expr (file, adj->new_ssa_base, 0);
4379 }
4380
4381 if (adj->op == IPA_PARM_OP_COPY)
4382 fprintf (file, ", copy_param");
4383 else if (adj->op == IPA_PARM_OP_REMOVE)
4384 fprintf (file, ", remove_param");
4385 else
4386 fprintf (file, ", offset %li", (long) adj->offset);
4387 if (adj->by_ref)
4388 fprintf (file, ", by_ref");
4389 print_node_brief (file, ", type: ", adj->type, 0);
4390 fprintf (file, "\n");
4391 }
4392 parms.release ();
4393 }
4394
4395 /* Dump the AV linked list. */
4396
4397 void
4398 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4399 {
4400 bool comma = false;
4401 fprintf (f, " Aggregate replacements:");
4402 for (; av; av = av->next)
4403 {
4404 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4405 av->index, av->offset);
4406 print_generic_expr (f, av->value, 0);
4407 comma = true;
4408 }
4409 fprintf (f, "\n");
4410 }
4411
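/* The output produced above has the form (values hypothetical):

     Aggregate replacements: 0[32]=1, 1[0]=42

   i.e. a comma-separated list of index[offset]=value entries.  */
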
4412 /* Stream out jump function JUMP_FUNC to OB. */
4413
4414 static void
4415 ipa_write_jump_function (struct output_block *ob,
4416 struct ipa_jump_func *jump_func)
4417 {
4418 struct ipa_agg_jf_item *item;
4419 struct bitpack_d bp;
4420 int i, count;
4421
4422 streamer_write_uhwi (ob, jump_func->type);
4423 switch (jump_func->type)
4424 {
4425 case IPA_JF_UNKNOWN:
4426 break;
4427 case IPA_JF_KNOWN_TYPE:
4428 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4429 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4430 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4431 break;
4432 case IPA_JF_CONST:
4433 gcc_assert (
4434 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4435 stream_write_tree (ob, jump_func->value.constant.value, true);
4436 break;
4437 case IPA_JF_PASS_THROUGH:
4438 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4439 if (jump_func->value.pass_through.operation == NOP_EXPR)
4440 {
4441 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4442 bp = bitpack_create (ob->main_stream);
4443 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4444 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4445 streamer_write_bitpack (&bp);
4446 }
4447 else
4448 {
4449 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4450 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4451 }
4452 break;
4453 case IPA_JF_ANCESTOR:
4454 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4455 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4456 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4457 bp = bitpack_create (ob->main_stream);
4458 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4459 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4460 streamer_write_bitpack (&bp);
4461 break;
4462 }
4463
4464 count = vec_safe_length (jump_func->agg.items);
4465 streamer_write_uhwi (ob, count);
4466 if (count)
4467 {
4468 bp = bitpack_create (ob->main_stream);
4469 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4470 streamer_write_bitpack (&bp);
4471 }
4472
4473 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4474 {
4475 streamer_write_uhwi (ob, item->offset);
4476 stream_write_tree (ob, item->value, true);
4477 }
4478 }
4479
4480 /* Read in jump function JUMP_FUNC from IB. */
4481
4482 static void
4483 ipa_read_jump_function (struct lto_input_block *ib,
4484 struct ipa_jump_func *jump_func,
4485 struct cgraph_edge *cs,
4486 struct data_in *data_in)
4487 {
4488 enum jump_func_type jftype;
4489 enum tree_code operation;
4490 int i, count;
4491
4492 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4493 switch (jftype)
4494 {
4495 case IPA_JF_UNKNOWN:
4496 jump_func->type = IPA_JF_UNKNOWN;
4497 break;
4498 case IPA_JF_KNOWN_TYPE:
4499 {
4500 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4501 tree base_type = stream_read_tree (ib, data_in);
4502 tree component_type = stream_read_tree (ib, data_in);
4503
4504 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4505 break;
4506 }
4507 case IPA_JF_CONST:
4508 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4509 break;
4510 case IPA_JF_PASS_THROUGH:
4511 operation = (enum tree_code) streamer_read_uhwi (ib);
4512 if (operation == NOP_EXPR)
4513 {
4514 int formal_id = streamer_read_uhwi (ib);
4515 struct bitpack_d bp = streamer_read_bitpack (ib);
4516 bool agg_preserved = bp_unpack_value (&bp, 1);
4517 bool type_preserved = bp_unpack_value (&bp, 1);
4518 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4519 type_preserved);
4520 }
4521 else
4522 {
4523 tree operand = stream_read_tree (ib, data_in);
4524 int formal_id = streamer_read_uhwi (ib);
4525 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4526 operation);
4527 }
4528 break;
4529 case IPA_JF_ANCESTOR:
4530 {
4531 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4532 tree type = stream_read_tree (ib, data_in);
4533 int formal_id = streamer_read_uhwi (ib);
4534 struct bitpack_d bp = streamer_read_bitpack (ib);
4535 bool agg_preserved = bp_unpack_value (&bp, 1);
4536 bool type_preserved = bp_unpack_value (&bp, 1);
4537
4538 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4539 type_preserved);
4540 break;
4541 }
4542 }
4543
4544 count = streamer_read_uhwi (ib);
4545 vec_alloc (jump_func->agg.items, count);
4546 if (count)
4547 {
4548 struct bitpack_d bp = streamer_read_bitpack (ib);
4549 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4550 }
4551 for (i = 0; i < count; i++)
4552 {
4553 struct ipa_agg_jf_item item;
4554 item.offset = streamer_read_uhwi (ib);
4555 item.value = stream_read_tree (ib, data_in);
4556 jump_func->agg.items->quick_push (item);
4557 }
4558 }
4559
4560 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4561 relevant to indirect inlining to OB. */
4562
4563 static void
4564 ipa_write_indirect_edge_info (struct output_block *ob,
4565 struct cgraph_edge *cs)
4566 {
4567 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4568 struct bitpack_d bp;
4569
4570 streamer_write_hwi (ob, ii->param_index);
4571 streamer_write_hwi (ob, ii->offset);
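  /* Pack the flag bits; ipa_read_indirect_edge_info below unpacks them in
     exactly this order.  */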
4572 bp = bitpack_create (ob->main_stream);
4573 bp_pack_value (&bp, ii->polymorphic, 1);
4574 bp_pack_value (&bp, ii->agg_contents, 1);
4575 bp_pack_value (&bp, ii->member_ptr, 1);
4576 bp_pack_value (&bp, ii->by_ref, 1);
4577 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4578 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4579 streamer_write_bitpack (&bp);
4580
4581 if (ii->polymorphic)
4582 {
4583 streamer_write_hwi (ob, ii->otr_token);
4584 stream_write_tree (ob, ii->otr_type, true);
4585 stream_write_tree (ob, ii->outer_type, true);
4586 }
4587 }
4588
4589 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4590 relevant to indirect inlining from IB. */
4591
4592 static void
4593 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4594 struct data_in *data_in ATTRIBUTE_UNUSED,
4595 struct cgraph_edge *cs)
4596 {
4597 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4598 struct bitpack_d bp;
4599
4600 ii->param_index = (int) streamer_read_hwi (ib);
4601 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4602 bp = streamer_read_bitpack (ib);
4603 ii->polymorphic = bp_unpack_value (&bp, 1);
4604 ii->agg_contents = bp_unpack_value (&bp, 1);
4605 ii->member_ptr = bp_unpack_value (&bp, 1);
4606 ii->by_ref = bp_unpack_value (&bp, 1);
4607 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4608 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4609 if (ii->polymorphic)
4610 {
4611 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4612 ii->otr_type = stream_read_tree (ib, data_in);
4613 ii->outer_type = stream_read_tree (ib, data_in);
4614 }
4615 }
4616
4617 /* Stream out NODE info to OB. */
4618
4619 static void
4620 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4621 {
4622 int node_ref;
4623 lto_symtab_encoder_t encoder;
4624 struct ipa_node_params *info = IPA_NODE_REF (node);
4625 int j;
4626 struct cgraph_edge *e;
4627 struct bitpack_d bp;
4628
4629 encoder = ob->decl_state->symtab_node_encoder;
4630 node_ref = lto_symtab_encoder_encode (encoder, node);
4631 streamer_write_uhwi (ob, node_ref);
4632
4633 streamer_write_uhwi (ob, ipa_get_param_count (info));
4634 for (j = 0; j < ipa_get_param_count (info); j++)
4635 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4636 bp = bitpack_create (ob->main_stream);
4637 gcc_assert (info->analysis_done
4638 || ipa_get_param_count (info) == 0);
4639 gcc_assert (!info->node_enqueued);
4640 gcc_assert (!info->ipcp_orig_node);
4641 for (j = 0; j < ipa_get_param_count (info); j++)
4642 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4643 streamer_write_bitpack (&bp);
4644 for (j = 0; j < ipa_get_param_count (info); j++)
4645 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4646 for (e = node->callees; e; e = e->next_callee)
4647 {
4648 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4649
4650 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4651 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4652 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4653 }
4654 for (e = node->indirect_calls; e; e = e->next_callee)
4655 {
4656 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4657
4658 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4659 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4660 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4661 ipa_write_indirect_edge_info (ob, e);
4662 }
4663 }
4664
4665 /* Stream in NODE info from IB. */
4666
4667 static void
4668 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4669 struct data_in *data_in)
4670 {
4671 struct ipa_node_params *info = IPA_NODE_REF (node);
4672 int k;
4673 struct cgraph_edge *e;
4674 struct bitpack_d bp;
4675
4676 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4677
4678 for (k = 0; k < ipa_get_param_count (info); k++)
4679 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4680
4681 bp = streamer_read_bitpack (ib);
4682 if (ipa_get_param_count (info) != 0)
4683 info->analysis_done = true;
4684 info->node_enqueued = false;
4685 for (k = 0; k < ipa_get_param_count (info); k++)
4686 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4687 for (k = 0; k < ipa_get_param_count (info); k++)
4688 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4689 for (e = node->callees; e; e = e->next_callee)
4690 {
4691 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4692 int count = streamer_read_uhwi (ib);
4693
4694 if (!count)
4695 continue;
4696 vec_safe_grow_cleared (args->jump_functions, count);
4697
4698 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4699 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4700 data_in);
4701 }
4702 for (e = node->indirect_calls; e; e = e->next_callee)
4703 {
4704 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4705 int count = streamer_read_uhwi (ib);
4706
4707 if (count)
4708 {
4709 vec_safe_grow_cleared (args->jump_functions, count);
4710 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4711 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4712 data_in);
4713 }
4714 ipa_read_indirect_edge_info (ib, data_in, e);
4715 }
4716 }
4717
4718 /* Write jump functions for all functions in the current LTO partition. */
4719
4720 void
4721 ipa_prop_write_jump_functions (void)
4722 {
4723 struct cgraph_node *node;
4724 struct output_block *ob;
4725 unsigned int count = 0;
4726 lto_symtab_encoder_iterator lsei;
4727 lto_symtab_encoder_t encoder;
4728
4730 if (!ipa_node_params_vector.exists ())
4731 return;
4732
4733 ob = create_output_block (LTO_section_jump_functions);
4734 encoder = ob->decl_state->symtab_node_encoder;
4735 ob->cgraph_node = NULL;
4736 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4737 lsei_next_function_in_partition (&lsei))
4738 {
4739 node = lsei_cgraph_node (lsei);
4740 if (cgraph_function_with_gimple_body_p (node)
4741 && IPA_NODE_REF (node) != NULL)
4742 count++;
4743 }
4744
4745 streamer_write_uhwi (ob, count);
4746
4747 /* Process all of the functions. */
4748 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4749 lsei_next_function_in_partition (&lsei))
4750 {
4751 node = lsei_cgraph_node (lsei);
4752 if (cgraph_function_with_gimple_body_p (node)
4753 && IPA_NODE_REF (node) != NULL)
4754 ipa_write_node_info (ob, node);
4755 }
4756 streamer_write_char_stream (ob->main_stream, 0);
4757 produce_asm (ob, NULL);
4758 destroy_output_block (ob);
4759 }
4760
4761 /* Read a jump-functions section in file FILE_DATA of length LEN with data DATA. */
4762
4763 static void
4764 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4765 size_t len)
4766 {
4767 const struct lto_function_header *header =
4768 (const struct lto_function_header *) data;
4769 const int cfg_offset = sizeof (struct lto_function_header);
4770 const int main_offset = cfg_offset + header->cfg_size;
4771 const int string_offset = main_offset + header->main_size;
4772 struct data_in *data_in;
4773 struct lto_input_block ib_main;
4774 unsigned int i;
4775 unsigned int count;
4776
4777 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4778 header->main_size);
4779
4780 data_in =
4781 lto_data_in_create (file_data, (const char *) data + string_offset,
4782 header->string_size, vNULL);
4783 count = streamer_read_uhwi (&ib_main);
4784
4785 for (i = 0; i < count; i++)
4786 {
4787 unsigned int index;
4788 struct cgraph_node *node;
4789 lto_symtab_encoder_t encoder;
4790
4791 index = streamer_read_uhwi (&ib_main);
4792 encoder = file_data->symtab_node_encoder;
4793 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4794 gcc_assert (node->definition);
4795 ipa_read_node_info (&ib_main, node, data_in);
4796 }
4797 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4798 len);
4799 lto_data_in_delete (data_in);
4800 }
4801
4802 /* Read ipcp jump functions. */
4803
4804 void
4805 ipa_prop_read_jump_functions (void)
4806 {
4807 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4808 struct lto_file_decl_data *file_data;
4809 unsigned int j = 0;
4810
4811 ipa_check_create_node_params ();
4812 ipa_check_create_edge_args ();
4813 ipa_register_cgraph_hooks ();
4814
4815 while ((file_data = file_data_vec[j++]))
4816 {
4817 size_t len;
4818 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4819
4820 if (data)
4821 ipa_prop_read_section (file_data, data, len);
4822 }
4823 }
4824
4825 /* After merging units, we can get a mismatch in argument counts.
4826 Moreover, decl merging might have rendered parameter lists obsolete.
4827 Also compute called_with_variable_arg info. */
4828
4829 void
4830 ipa_update_after_lto_read (void)
4831 {
4832 ipa_check_create_node_params ();
4833 ipa_check_create_edge_args ();
4834 }
4835
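/* Stream out the aggregate value replacement chain for NODE to OB.  */
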
4836 void
4837 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4838 {
4839 int node_ref;
4840 unsigned int count = 0;
4841 lto_symtab_encoder_t encoder;
4842 struct ipa_agg_replacement_value *aggvals, *av;
4843
4844 aggvals = ipa_get_agg_replacements_for_node (node);
4845 encoder = ob->decl_state->symtab_node_encoder;
4846 node_ref = lto_symtab_encoder_encode (encoder, node);
4847 streamer_write_uhwi (ob, node_ref);
4848
4849 for (av = aggvals; av; av = av->next)
4850 count++;
4851 streamer_write_uhwi (ob, count);
4852
4853 for (av = aggvals; av; av = av->next)
4854 {
4855 struct bitpack_d bp;
4856
4857 streamer_write_uhwi (ob, av->offset);
4858 streamer_write_uhwi (ob, av->index);
4859 stream_write_tree (ob, av->value, true);
4860
4861 bp = bitpack_create (ob->main_stream);
4862 bp_pack_value (&bp, av->by_ref, 1);
4863 streamer_write_bitpack (&bp);
4864 }
4865 }
4866
4867 /* Stream in the aggregate value replacement chain for NODE from IB. */
4868
4869 static void
4870 read_agg_replacement_chain (struct lto_input_block *ib,
4871 struct cgraph_node *node,
4872 struct data_in *data_in)
4873 {
4874 struct ipa_agg_replacement_value *aggvals = NULL;
4875 unsigned int count, i;
4876
4877 count = streamer_read_uhwi (ib);
4878 for (i = 0; i < count; i++)
4879 {
4880 struct ipa_agg_replacement_value *av;
4881 struct bitpack_d bp;
4882
4883 av = ggc_alloc<ipa_agg_replacement_value> ();
4884 av->offset = streamer_read_uhwi (ib);
4885 av->index = streamer_read_uhwi (ib);
4886 av->value = stream_read_tree (ib, data_in);
4887 bp = streamer_read_bitpack (ib);
4888 av->by_ref = bp_unpack_value (&bp, 1);
4889 av->next = aggvals;
4890 aggvals = av;
4891 }
4892 ipa_set_node_agg_value_chain (node, aggvals);
4893 }
4894
4895 /* Write all aggregate replacements for nodes in the current partition. */
4896
4897 void
4898 ipa_prop_write_all_agg_replacement (void)
4899 {
4900 struct cgraph_node *node;
4901 struct output_block *ob;
4902 unsigned int count = 0;
4903 lto_symtab_encoder_iterator lsei;
4904 lto_symtab_encoder_t encoder;
4905
4906 if (!ipa_node_agg_replacements)
4907 return;
4908
4909 ob = create_output_block (LTO_section_ipcp_transform);
4910 encoder = ob->decl_state->symtab_node_encoder;
4911 ob->cgraph_node = NULL;
4912 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4913 lsei_next_function_in_partition (&lsei))
4914 {
4915 node = lsei_cgraph_node (lsei);
4916 if (cgraph_function_with_gimple_body_p (node)
4917 && ipa_get_agg_replacements_for_node (node) != NULL)
4918 count++;
4919 }
4920
4921 streamer_write_uhwi (ob, count);
4922
4923 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4924 lsei_next_function_in_partition (&lsei))
4925 {
4926 node = lsei_cgraph_node (lsei);
4927 if (cgraph_function_with_gimple_body_p (node)
4928 && ipa_get_agg_replacements_for_node (node) != NULL)
4929 write_agg_replacement_chain (ob, node);
4930 }
4931 streamer_write_char_stream (ob->main_stream, 0);
4932 produce_asm (ob, NULL);
4933 destroy_output_block (ob);
4934 }
4935
4936 /* Read replacements section in file FILE_DATA of length LEN with data
4937 DATA. */
4938
4939 static void
4940 read_replacements_section (struct lto_file_decl_data *file_data,
4941 const char *data,
4942 size_t len)
4943 {
4944 const struct lto_function_header *header =
4945 (const struct lto_function_header *) data;
4946 const int cfg_offset = sizeof (struct lto_function_header);
4947 const int main_offset = cfg_offset + header->cfg_size;
4948 const int string_offset = main_offset + header->main_size;
4949 struct data_in *data_in;
4950 struct lto_input_block ib_main;
4951 unsigned int i;
4952 unsigned int count;
4953
4954 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4955 header->main_size);
4956
4957 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4958 header->string_size, vNULL);
4959 count = streamer_read_uhwi (&ib_main);
4960
4961 for (i = 0; i < count; i++)
4962 {
4963 unsigned int index;
4964 struct cgraph_node *node;
4965 lto_symtab_encoder_t encoder;
4966
4967 index = streamer_read_uhwi (&ib_main);
4968 encoder = file_data->symtab_node_encoder;
4969 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4970 gcc_assert (node->definition);
4971 read_agg_replacement_chain (&ib_main, node, data_in);
4972 }
4973 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4974 len);
4975 lto_data_in_delete (data_in);
4976 }
4977
4978 /* Read IPA-CP aggregate replacements. */
4979
4980 void
4981 ipa_prop_read_all_agg_replacement (void)
4982 {
4983 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4984 struct lto_file_decl_data *file_data;
4985 unsigned int j = 0;
4986
4987 while ((file_data = file_data_vec[j++]))
4988 {
4989 size_t len;
4990 const char *data = lto_get_section_data (file_data,
4991 LTO_section_ipcp_transform,
4992 NULL, &len);
4993 if (data)
4994 read_replacements_section (file_data, data, len);
4995 }
4996 }
4997
4998 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4999 NODE. */
5000
5001 static void
5002 adjust_agg_replacement_values (struct cgraph_node *node,
5003 struct ipa_agg_replacement_value *aggval)
5004 {
5005 struct ipa_agg_replacement_value *v;
5006 int i, c = 0, d = 0, *adj;
5007
5008 if (!node->clone.combined_args_to_skip)
5009 return;
5010
5011 for (v = aggval; v; v = v->next)
5012 {
5013 gcc_assert (v->index >= 0);
5014 if (c < v->index)
5015 c = v->index;
5016 }
5017 c++;
5018
5019 adj = XALLOCAVEC (int, c);
5020 for (i = 0; i < c; i++)
5021 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5022 {
5023 adj[i] = -1;
5024 d++;
5025 }
5026 else
5027 adj[i] = i - d;
5028
5029 for (v = aggval; v; v = v->next)
5030 v->index = adj[v->index];
5031 }
5032
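/* A worked example with hypothetical values: if NODE skips argument 1 of
   an original signature with three parameters, the ADJ array becomes
   {0, -1, 1}, so a replacement recorded for original index 2 is remapped
   to index 1 in the clone.  Replacements for skipped parameters are
   assumed not to occur.  */
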
5033 /* Dominator walker driving the ipcp modification phase. */
5034
5035 class ipcp_modif_dom_walker : public dom_walker
5036 {
5037 public:
5038 ipcp_modif_dom_walker (struct func_body_info *fbi,
5039 vec<ipa_param_descriptor> descs,
5040 struct ipa_agg_replacement_value *av,
5041 bool *sc, bool *cc)
5042 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5043 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5044
5045 virtual void before_dom_children (basic_block);
5046
5047 private:
5048 struct func_body_info *m_fbi;
5049 vec<ipa_param_descriptor> m_descriptors;
5050 struct ipa_agg_replacement_value *m_aggval;
5051 bool *m_something_changed, *m_cfg_changed;
5052 };
5053
5054 void
5055 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5056 {
5057 gimple_stmt_iterator gsi;
5058 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5059 {
5060 struct ipa_agg_replacement_value *v;
5061 gimple stmt = gsi_stmt (gsi);
5062 tree rhs, val, t;
5063 HOST_WIDE_INT offset, size;
5064 int index;
5065 bool by_ref, vce;
5066
5067 if (!gimple_assign_load_p (stmt))
5068 continue;
5069 rhs = gimple_assign_rhs1 (stmt);
5070 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5071 continue;
5072
5073 vce = false;
5074 t = rhs;
5075 while (handled_component_p (t))
5076 {
5077 /* V_C_E can do things like convert an array of integers to one
5078 bigger integer and similar things we do not handle below. */
5079 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5080 {
5081 vce = true;
5082 break;
5083 }
5084 t = TREE_OPERAND (t, 0);
5085 }
5086 if (vce)
5087 continue;
5088
5089 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5090 &offset, &size, &by_ref))
5091 continue;
5092 for (v = m_aggval; v; v = v->next)
5093 if (v->index == index
5094 && v->offset == offset)
5095 break;
5096 if (!v
5097 || v->by_ref != by_ref
5098 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5099 continue;
5100
5101 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5102 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5103 {
5104 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5105 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5106 else if (TYPE_SIZE (TREE_TYPE (rhs))
5107 == TYPE_SIZE (TREE_TYPE (v->value)))
5108 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5109 else
5110 {
5111 if (dump_file)
5112 {
5113 fprintf (dump_file, " const ");
5114 print_generic_expr (dump_file, v->value, 0);
5115 fprintf (dump_file, " can't be converted to type of ");
5116 print_generic_expr (dump_file, rhs, 0);
5117 fprintf (dump_file, "\n");
5118 }
5119 continue;
5120 }
5121 }
5122 else
5123 val = v->value;
5124
5125 if (dump_file && (dump_flags & TDF_DETAILS))
5126 {
5127 fprintf (dump_file, "Modifying stmt:\n ");
5128 print_gimple_stmt (dump_file, stmt, 0, 0);
5129 }
5130 gimple_assign_set_rhs_from_tree (&gsi, val);
5131 update_stmt (stmt);
5132
5133 if (dump_file && (dump_flags & TDF_DETAILS))
5134 {
5135 fprintf (dump_file, "into:\n ");
5136 print_gimple_stmt (dump_file, stmt, 0, 0);
5137 fprintf (dump_file, "\n");
5138 }
5139
5140 *m_something_changed = true;
5141 if (maybe_clean_eh_stmt (stmt)
5142 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5143 *m_cfg_changed = true;
5144 }
5145
5146 }
5147
5148 /* IPCP transformation phase doing propagation of aggregate values. */
5149
5150 unsigned int
5151 ipcp_transform_function (struct cgraph_node *node)
5152 {
5153 vec<ipa_param_descriptor> descriptors = vNULL;
5154 struct func_body_info fbi;
5155 struct ipa_agg_replacement_value *aggval;
5156 int param_count;
5157 bool cfg_changed = false, something_changed = false;
5158
5159 gcc_checking_assert (cfun);
5160 gcc_checking_assert (current_function_decl);
5161
5162 if (dump_file)
5163 fprintf (dump_file, "Modification phase of node %s/%i\n",
5164 node->name (), node->order);
5165
5166 aggval = ipa_get_agg_replacements_for_node (node);
5167 if (!aggval)
5168 return 0;
5169 param_count = count_formal_params (node->decl);
5170 if (param_count == 0)
5171 return 0;
5172 adjust_agg_replacement_values (node, aggval);
5173 if (dump_file)
5174 ipa_dump_agg_replacement_values (dump_file, aggval);
5175
5176 fbi.node = node;
5177 fbi.info = NULL;
5178 fbi.bb_infos = vNULL;
5179 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5180 fbi.param_count = param_count;
5181 fbi.aa_walked = 0;
5182
5183 descriptors.safe_grow_cleared (param_count);
5184 ipa_populate_param_decls (node, descriptors);
5185 calculate_dominance_info (CDI_DOMINATORS);
5186 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5187 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5188
5189 int i;
5190 struct ipa_bb_info *bi;
5191 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5192 free_ipa_bb_info (bi);
5193 fbi.bb_infos.release ();
5194 free_dominance_info (CDI_DOMINATORS);
5195 (*ipa_node_agg_replacements)[node->uid] = NULL;
5196 descriptors.release ();
5197
5198 if (!something_changed)
5199 return 0;
5200 else if (cfg_changed)
5201 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5202 else
5203 return TODO_update_ssa_only_virtuals;
5204 }