/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at function
   body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at function
   body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements walked so far while analyzing this function.  */
  unsigned int aa_walked;
};
117
118 /* Vector where the parameter infos are actually stored. */
119 vec<ipa_node_params> ipa_node_params_vector;
120 /* Vector of known aggregate values in cloned nodes. */
121 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
122 /* Vector where the parameter infos are actually stored. */
123 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
124
125 /* Holders of ipa cgraph hooks: */
126 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
127 static struct cgraph_node_hook_list *node_removal_hook_holder;
128 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
129 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
130 static struct cgraph_node_hook_list *function_insertion_hook_holder;
131
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */
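
/* For example (an illustrative assumption, not code from this file), a
   function declared with __attribute__ ((optimize ("O0"))) carries a
   DECL_FUNCTION_SPECIFIC_OPTIMIZATION node whose x_optimize is zero, so the
   predicate below answers true for it and IPA-CP leaves it alone.  */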

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

/* Return index of the formal whose tree is PTREE in the function whose
   parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump information about the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
      if (IPA_EDGE_REF (cs)->polymorphic_call_contexts)
        ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i)->dump (f);
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set JFUNC to be a copy of another jump function (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
          || !TYPE_BINFO (jfunc->value.known_type.component_type)
          || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
        return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Get IPA BB information about the given BB.  FBI is the context of the
   analysis of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
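
/* A minimal illustration of the constructor shape assumed above (a sketch,
   not code from this file):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   B::B () is compiled roughly as:

     1) call A::A (this);              -- construct ancestor sub-objects
     2) this->_vptr = &vtable_for_B;   -- store the new VMT pointer(s)
     3) run member initializers and the user-written constructor body,
        which may call virtual functions, directly or indirectly.  */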

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they can call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we do not have
     an easy tie on this, so punt on all non-pure cdtors.
     We could also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
         is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true and fill in the jump function JFUNC with
   relevant type information or set it to unknown.  ARG is the object itself
   (not a pointer to it, unless dereferenced).  BASE is the base of the memory
   access as returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked by setting
   the boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   and the value is known not to have been modified, return the index of the
   parameter in ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (but the
   former can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the func_body_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

      B::foo() (struct B * const this)
      {
        struct A * D.1845;

        D.1845_2 = &this_1(D)->D.1748;
        A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  FBI contains the information that is only
   needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
                         index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
                                    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
         we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
          || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
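
/* For orientation, a C++ pointer to member function is commonly lowered (in
   the Itanium C++ ABI; an illustrative sketch, not a definition from this
   file) to a record equivalent to:

     struct
     {
       fn_ptr __pfn;      -- pointer to METHOD_TYPE (or vtable offset)
       ptrdiff_t delta;   -- integral THIS-pointer adjustment
     };

   which is the shape the predicate below tests for.  */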

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */
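
/* For example (a hypothetical walk-through of the rules above): with list
   entries covering bits 0-63 and 128-191, a request for LHS_OFFSET 64 and
   LHS_SIZE 64 returns the link between the two entries; LHS_OFFSET 128 with
   LHS_SIZE 64 sets *ALREADY_THERE; and LHS_OFFSET 32 with LHS_SIZE 64
   partially overlaps the first entry, so NULL is returned.  */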

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}
1675
1676 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1677 in ARG is filled in with constant values. ARG can either be an aggregate
1678 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1679 aggregate. JFUNC is the jump function into which the constants are
1680 subsequently stored. */
1681
1682 static void
1683 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1684 struct ipa_jump_func *jfunc)
1685 {
1686 struct ipa_known_agg_contents_list *list = NULL;
1687 int item_count = 0, const_count = 0;
1688 HOST_WIDE_INT arg_offset, arg_size;
1689 gimple_stmt_iterator gsi;
1690 tree arg_base;
1691 bool check_ref, by_ref;
1692 ao_ref r;
1693
1694 /* The function operates in three stages. First, we prepare check_ref, r,
1695 arg_base and arg_offset based on what is actually passed as an actual
1696 argument. */
1697
1698 if (POINTER_TYPE_P (arg_type))
1699 {
1700 by_ref = true;
1701 if (TREE_CODE (arg) == SSA_NAME)
1702 {
1703 tree type_size;
1704 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1705 return;
1706 check_ref = true;
1707 arg_base = arg;
1708 arg_offset = 0;
1709 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1710 arg_size = tree_to_uhwi (type_size);
1711 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1712 }
1713 else if (TREE_CODE (arg) == ADDR_EXPR)
1714 {
1715 HOST_WIDE_INT arg_max_size;
1716
1717 arg = TREE_OPERAND (arg, 0);
1718 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1719 &arg_max_size);
1720 if (arg_max_size == -1
1721 || arg_max_size != arg_size
1722 || arg_offset < 0)
1723 return;
1724 if (DECL_P (arg_base))
1725 {
1726 check_ref = false;
1727 ao_ref_init (&r, arg_base);
1728 }
1729 else
1730 return;
1731 }
1732 else
1733 return;
1734 }
1735 else
1736 {
1737 HOST_WIDE_INT arg_max_size;
1738
1739 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1740
1741 by_ref = false;
1742 check_ref = false;
1743 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1744 &arg_max_size);
1745 if (arg_max_size == -1
1746 || arg_max_size != arg_size
1747 || arg_offset < 0)
1748 return;
1749
1750 ao_ref_init (&r, arg);
1751 }
1752
1753 /* Second stage walks back the BB, looks at individual statements and as long
1754 as it is confident of how the statements affect contents of the
1755 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1756 structures describing it. */
1757 gsi = gsi_for_stmt (call);
1758 gsi_prev (&gsi);
1759 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1760 {
1761 struct ipa_known_agg_contents_list *n, **p;
1762 gimple stmt = gsi_stmt (gsi);
1763 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1764 tree lhs, rhs, lhs_base;
1765
1766 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1767 continue;
1768 if (!gimple_assign_single_p (stmt))
1769 break;
1770
1771 lhs = gimple_assign_lhs (stmt);
1772 rhs = gimple_assign_rhs1 (stmt);
1773 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1774 || TREE_CODE (lhs) == BIT_FIELD_REF
1775 || contains_bitfld_component_ref_p (lhs))
1776 break;
1777
1778 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1779 &lhs_max_size);
1780 if (lhs_max_size == -1
1781 || lhs_max_size != lhs_size)
1782 break;
1783
1784 if (check_ref)
1785 {
1786 if (TREE_CODE (lhs_base) != MEM_REF
1787 || TREE_OPERAND (lhs_base, 0) != arg_base
1788 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1789 break;
1790 }
1791 else if (lhs_base != arg_base)
1792 {
1793 if (DECL_P (lhs_base))
1794 continue;
1795 else
1796 break;
1797 }
1798
1799 bool already_there = false;
1800 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1801 &already_there);
1802 if (!p)
1803 break;
1804 if (already_there)
1805 continue;
1806
1807 rhs = get_ssa_def_if_simple_copy (rhs);
1808 n = XALLOCA (struct ipa_known_agg_contents_list);
1809 n->size = lhs_size;
1810 n->offset = lhs_offset;
1811 if (is_gimple_ip_invariant (rhs))
1812 {
1813 n->constant = rhs;
1814 const_count++;
1815 }
1816 else
1817 n->constant = NULL_TREE;
1818 n->next = *p;
1819 *p = n;
1820
1821 item_count++;
1822 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1823 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1824 break;
1825 }
1826
1827 /* Third stage just goes over the list and creates an appropriate vector of
1828 ipa_agg_jf_item structures out of it, of course only if there are
1829 any known constants to begin with. */
1830
1831 if (const_count)
1832 {
1833 jfunc->agg.by_ref = by_ref;
1834 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1835 }
1836 }
1837
1838 static tree
1839 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1840 {
1841 int n;
1842 tree type = (e->callee
1843 ? TREE_TYPE (e->callee->decl)
1844 : gimple_call_fntype (e->call_stmt));
1845 tree t = TYPE_ARG_TYPES (type);
1846
1847 for (n = 0; n < i; n++)
1848 {
1849 if (!t)
1850 break;
1851 t = TREE_CHAIN (t);
1852 }
1853 if (t)
1854 return TREE_VALUE (t);
1855 if (!e->callee)
1856 return NULL;
1857 t = DECL_ARGUMENTS (e->callee->decl);
1858 for (n = 0; n < i; n++)
1859 {
1860 if (!t)
1861 return NULL;
1862 t = TREE_CHAIN (t);
1863 }
1864 if (t)
1865 return TREE_TYPE (t);
1866 return NULL;
1867 }
1868
1869 /* Compute jump function for all arguments of callsite CS and insert the
1870 information in the jump_functions array in the ipa_edge_args corresponding
1871 to this callsite. */
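
/* For example (a hypothetical caller), given

     void caller (int i, struct S *p)
     {
       callee (i, 7, p);
     }

   the first argument gets a simple pass-through jump function for formal
   parameter 0, the second a constant jump function for 7, and the third a
   pass-through for parameter 1 whose agg_preserved flag depends on whether
   the memory p points to may have been modified before the call.  */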
1872
1873 static void
1874 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1875 struct cgraph_edge *cs)
1876 {
1877 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1878 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1879 gimple call = cs->call_stmt;
1880 int n, arg_num = gimple_call_num_args (call);
1881 bool useful_context = false;
1882
1883 if (arg_num == 0 || args->jump_functions)
1884 return;
1885 vec_safe_grow_cleared (args->jump_functions, arg_num);
1886 if (flag_devirtualize)
1887 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1888
1889 if (gimple_call_internal_p (call))
1890 return;
1891 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1892 return;
1893
1894 for (n = 0; n < arg_num; n++)
1895 {
1896 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1897 tree arg = gimple_call_arg (call, n);
1898 tree param_type = ipa_get_callee_param_type (cs, n);
1899 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1900 {
1901 struct ipa_polymorphic_call_context context (cs->caller->decl,
1902 arg, cs->call_stmt,
1903 NULL);
1904 /* TODO: We should also handle dynamic types. */
1905 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1906 if (!context.useless_p ())
1907 useful_context = true;
1908 }
1909
1910 if (is_gimple_ip_invariant (arg))
1911 ipa_set_jf_constant (jfunc, arg, cs);
1912 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1913 && TREE_CODE (arg) == PARM_DECL)
1914 {
1915 int index = ipa_get_param_decl_index (info, arg);
1916
1917 gcc_assert (index >= 0);
1918 /* Aggregate passed by value, check for pass-through, otherwise we
1919 will attempt to fill in aggregate contents later in this
1920 loop. */
1921 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1922 {
1923 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1924 continue;
1925 }
1926 }
1927 else if (TREE_CODE (arg) == SSA_NAME)
1928 {
1929 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1930 {
1931 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1932 if (index >= 0)
1933 {
1934 bool agg_p, type_p;
1935 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1936 if (param_type && POINTER_TYPE_P (param_type))
1937 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1938 call, jfunc);
1939 else
1940 type_p = false;
1941 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1942 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1943 type_p);
1944 }
1945 }
1946 else
1947 {
1948 gimple stmt = SSA_NAME_DEF_STMT (arg);
1949 if (is_gimple_assign (stmt))
1950 compute_complex_assign_jump_func (fbi, info, jfunc,
1951 call, stmt, arg, param_type);
1952 else if (gimple_code (stmt) == GIMPLE_PHI)
1953 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1954 call, stmt, param_type);
1955 }
1956 }
1957 else
1958 compute_known_type_jump_func (arg, jfunc, call,
1959 param_type
1960 && POINTER_TYPE_P (param_type)
1961 ? TREE_TYPE (param_type)
1962 : NULL);
1963
1964 /* If ARG is a pointer, we cannot use its type to determine the type of the
1965 aggregate passed (because type conversions are ignored in gimple). Usually
1966 we can safely get the type from the function declaration, but in the case of
1967 K&R prototypes or variadic functions we can try our luck with the type of
1968 the pointer passed.
1969 TODO: Since we look for actual initialization of the memory object, we may
1970 be better off working out the type based on the memory stores we find. */
1970 if (!param_type)
1971 param_type = TREE_TYPE (arg);
1972
1973 if ((jfunc->type != IPA_JF_PASS_THROUGH
1974 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1975 && (jfunc->type != IPA_JF_ANCESTOR
1976 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1977 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1978 || POINTER_TYPE_P (param_type)))
1979 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1980 }
1981 if (!useful_context)
1982 vec_free (args->polymorphic_call_contexts);
1983 }
1984
1985 /* Compute jump functions for all edges - both direct and indirect - outgoing
1986 from BB. */
1987
1988 static void
1989 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1990 {
1991 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1992 int i;
1993 struct cgraph_edge *cs;
1994
1995 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1996 {
1997 struct cgraph_node *callee = cs->callee;
1998
1999 if (callee)
2000 {
2001 callee->ultimate_alias_target ();
2002 /* We do not need to bother analyzing calls to unknown functions
2003 unless they may become known during lto/whopr. */
2004 if (!callee->definition && !flag_lto)
2005 continue;
2006 }
2007 ipa_compute_jump_functions_for_edge (fbi, cs);
2008 }
2009 }
2010
2011 /* If STMT looks like a statement loading a value from a member pointer formal
2012 parameter, return that parameter and store the offset of the field to
2013 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2014 might be clobbered). If USE_DELTA, then we look for a use of the delta
2015 field rather than the pfn. */
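
/* For instance (hypothetical gimple mirroring the larger example further
   below), a load such as

     f$__pfn_24 = MEM[(struct *)&f];

   where f is a member pointer PARM_DECL matches the pfn field when the
   MEM_REF offset equals the field's byte position; with USE_DELTA set, the
   analogous load of the __delta field is matched instead.  */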
2016
2017 static tree
2018 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2019 HOST_WIDE_INT *offset_p)
2020 {
2021 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2022
2023 if (!gimple_assign_single_p (stmt))
2024 return NULL_TREE;
2025
2026 rhs = gimple_assign_rhs1 (stmt);
2027 if (TREE_CODE (rhs) == COMPONENT_REF)
2028 {
2029 ref_field = TREE_OPERAND (rhs, 1);
2030 rhs = TREE_OPERAND (rhs, 0);
2031 }
2032 else
2033 ref_field = NULL_TREE;
2034 if (TREE_CODE (rhs) != MEM_REF)
2035 return NULL_TREE;
2036 rec = TREE_OPERAND (rhs, 0);
2037 if (TREE_CODE (rec) != ADDR_EXPR)
2038 return NULL_TREE;
2039 rec = TREE_OPERAND (rec, 0);
2040 if (TREE_CODE (rec) != PARM_DECL
2041 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2042 return NULL_TREE;
2043 ref_offset = TREE_OPERAND (rhs, 1);
2044
2045 if (use_delta)
2046 fld = delta_field;
2047 else
2048 fld = ptr_field;
2049 if (offset_p)
2050 *offset_p = int_bit_position (fld);
2051
2052 if (ref_field)
2053 {
2054 if (integer_nonzerop (ref_offset))
2055 return NULL_TREE;
2056 return ref_field == fld ? rec : NULL_TREE;
2057 }
2058 else
2059 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2060 : NULL_TREE;
2061 }
2062
2063 /* Returns true iff T is an SSA_NAME defined by a statement. */
2064
2065 static bool
2066 ipa_is_ssa_with_stmt_def (tree t)
2067 {
2068 if (TREE_CODE (t) == SSA_NAME
2069 && !SSA_NAME_IS_DEFAULT_DEF (t))
2070 return true;
2071 else
2072 return false;
2073 }
2074
2075 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2076 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2077 indirect call graph edge. */
2078
2079 static struct cgraph_edge *
2080 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2081 {
2082 struct cgraph_edge *cs;
2083
2084 cs = node->get_edge (stmt);
2085 cs->indirect_info->param_index = param_index;
2086 cs->indirect_info->agg_contents = 0;
2087 cs->indirect_info->member_ptr = 0;
2088 return cs;
2089 }
2090
2091 /* Analyze the CALL and examine uses of formal parameters of the caller
2092 FBI->node (described by FBI->info). Currently it checks
2094 whether the call calls a pointer that is a formal parameter and if so, the
2095 parameter is marked with the called flag and an indirect call graph edge
2096 describing the call is created. This is very simple for ordinary pointers
2097 represented in SSA but not-so-nice when it comes to member pointers. The
2098 ugly part of this function does nothing more than trying to match the
2099 pattern of such a call. An example of such a pattern is the gimple dump
2100 below, the call is on the last line:
2101
2102 <bb 2>:
2103 f$__delta_5 = f.__delta;
2104 f$__pfn_24 = f.__pfn;
2105
2106 or
2107 <bb 2>:
2108 f$__delta_5 = MEM[(struct *)&f];
2109 f$__pfn_24 = MEM[(struct *)&f + 4B];
2110
2111 and a few lines below:
2112
2113 <bb 5>
2114 D.2496_3 = (int) f$__pfn_24;
2115 D.2497_4 = D.2496_3 & 1;
2116 if (D.2497_4 != 0)
2117 goto <bb 3>;
2118 else
2119 goto <bb 4>;
2120
2121 <bb 6>:
2122 D.2500_7 = (unsigned int) f$__delta_5;
2123 D.2501_8 = &S + D.2500_7;
2124 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2125 D.2503_10 = *D.2502_9;
2126 D.2504_12 = f$__pfn_24 + -1;
2127 D.2505_13 = (unsigned int) D.2504_12;
2128 D.2506_14 = D.2503_10 + D.2505_13;
2129 D.2507_15 = *D.2506_14;
2130 iftmp.11_16 = (String:: *) D.2507_15;
2131
2132 <bb 7>:
2133 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2134 D.2500_19 = (unsigned int) f$__delta_5;
2135 D.2508_20 = &S + D.2500_19;
2136 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2137
2138 Such patterns are results of simple calls to a member pointer:
2139
2140 int doprinting (int (MyString::* f)(int) const)
2141 {
2142 MyString S ("somestring");
2143
2144 return (S.*f)(4);
2145 }
2146
2147 Moreover, the function also looks for called pointers loaded from aggregates
2148 passed by value or reference. */
2149
2150 static void
2151 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2152 tree target)
2153 {
2154 struct ipa_node_params *info = fbi->info;
2155 HOST_WIDE_INT offset;
2156 bool by_ref;
2157
2158 if (SSA_NAME_IS_DEFAULT_DEF (target))
2159 {
2160 tree var = SSA_NAME_VAR (target);
2161 int index = ipa_get_param_decl_index (info, var);
2162 if (index >= 0)
2163 ipa_note_param_call (fbi->node, index, call);
2164 return;
2165 }
2166
2167 int index;
2168 gimple def = SSA_NAME_DEF_STMT (target);
2169 if (gimple_assign_single_p (def)
2170 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2171 gimple_assign_rhs1 (def), &index, &offset,
2172 NULL, &by_ref))
2173 {
2174 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2175 cs->indirect_info->offset = offset;
2176 cs->indirect_info->agg_contents = 1;
2177 cs->indirect_info->by_ref = by_ref;
2178 return;
2179 }
2180
2181 /* Now we need to try to match the complex pattern of calling a member
2182 pointer. */
2183 if (gimple_code (def) != GIMPLE_PHI
2184 || gimple_phi_num_args (def) != 2
2185 || !POINTER_TYPE_P (TREE_TYPE (target))
2186 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2187 return;
2188
2189 /* First, we need to check whether one of these is a load from a member
2190 pointer that is a parameter to this function. */
2191 tree n1 = PHI_ARG_DEF (def, 0);
2192 tree n2 = PHI_ARG_DEF (def, 1);
2193 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2194 return;
2195 gimple d1 = SSA_NAME_DEF_STMT (n1);
2196 gimple d2 = SSA_NAME_DEF_STMT (n2);
2197
2198 tree rec;
2199 basic_block bb, virt_bb;
2200 basic_block join = gimple_bb (def);
2201 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2202 {
2203 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2204 return;
2205
2206 bb = EDGE_PRED (join, 0)->src;
2207 virt_bb = gimple_bb (d2);
2208 }
2209 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2210 {
2211 bb = EDGE_PRED (join, 1)->src;
2212 virt_bb = gimple_bb (d1);
2213 }
2214 else
2215 return;
2216
2217 /* Second, we need to check that the basic blocks are laid out in the way
2218 corresponding to the pattern. */
2219
2220 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2221 || single_pred (virt_bb) != bb
2222 || single_succ (virt_bb) != join)
2223 return;
2224
2225 /* Third, let's see that the branching is done depending on the least
2226 significant bit of the pfn. */
2227
2228 gimple branch = last_stmt (bb);
2229 if (!branch || gimple_code (branch) != GIMPLE_COND)
2230 return;
2231
2232 if ((gimple_cond_code (branch) != NE_EXPR
2233 && gimple_cond_code (branch) != EQ_EXPR)
2234 || !integer_zerop (gimple_cond_rhs (branch)))
2235 return;
2236
2237 tree cond = gimple_cond_lhs (branch);
2238 if (!ipa_is_ssa_with_stmt_def (cond))
2239 return;
2240
2241 def = SSA_NAME_DEF_STMT (cond);
2242 if (!is_gimple_assign (def)
2243 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2244 || !integer_onep (gimple_assign_rhs2 (def)))
2245 return;
2246
2247 cond = gimple_assign_rhs1 (def);
2248 if (!ipa_is_ssa_with_stmt_def (cond))
2249 return;
2250
2251 def = SSA_NAME_DEF_STMT (cond);
2252
2253 if (is_gimple_assign (def)
2254 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2255 {
2256 cond = gimple_assign_rhs1 (def);
2257 if (!ipa_is_ssa_with_stmt_def (cond))
2258 return;
2259 def = SSA_NAME_DEF_STMT (cond);
2260 }
2261
2262 tree rec2;
2263 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2264 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2265 == ptrmemfunc_vbit_in_delta),
2266 NULL);
2267 if (rec != rec2)
2268 return;
2269
2270 index = ipa_get_param_decl_index (info, rec);
2271 if (index >= 0
2272 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2273 {
2274 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2275 cs->indirect_info->offset = offset;
2276 cs->indirect_info->agg_contents = 1;
2277 cs->indirect_info->member_ptr = 1;
2278 }
2279
2280 return;
2281 }
2282
2283 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2284 object referenced in the expression is a formal parameter of the caller
2285 FBI->node (described by FBI->info), create a call note for the
2286 statement. */
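
/* E.g. (a hypothetical C++ caller), in

     void caller (struct A *a)
     {
       a->foo ();
     }

   the OBJ_TYPE_REF object is the default definition of parameter a, so,
   unless a type change is detected, an indirect call note with a zero offset
   is created for formal parameter 0.  */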
2287
2288 static void
2289 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2290 gimple call, tree target)
2291 {
2292 tree obj = OBJ_TYPE_REF_OBJECT (target);
2293 int index;
2294 HOST_WIDE_INT anc_offset;
2295
2296 if (!flag_devirtualize)
2297 return;
2298
2299 if (TREE_CODE (obj) != SSA_NAME)
2300 return;
2301
2302 struct ipa_node_params *info = fbi->info;
2303 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2304 {
2305 struct ipa_jump_func jfunc;
2306 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2307 return;
2308
2309 anc_offset = 0;
2310 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2311 gcc_assert (index >= 0);
2312 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2313 call, &jfunc))
2314 return;
2315 }
2316 else
2317 {
2318 struct ipa_jump_func jfunc;
2319 gimple stmt = SSA_NAME_DEF_STMT (obj);
2320 tree expr;
2321
2322 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2323 if (!expr)
2324 return;
2325 index = ipa_get_param_decl_index (info,
2326 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2327 gcc_assert (index >= 0);
2328 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2329 call, &jfunc, anc_offset))
2330 return;
2331 }
2332
2333 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2334 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2335 ii->offset = anc_offset;
2336 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2337 ii->otr_type = obj_type_ref_class (target);
2338 ii->polymorphic = 1;
2339 }
2340
2341 /* Analyze a call statement CALL, checking whether and how it utilizes formal
2342 parameters of the caller described by FBI. */
2344
2345 static void
2346 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2347 {
2348 tree target = gimple_call_fn (call);
2349
2350 if (!target
2351 || (TREE_CODE (target) != SSA_NAME
2352 && !virtual_method_call_p (target)))
2353 return;
2354
2355 struct cgraph_edge *cs = fbi->node->get_edge (call);
2356 /* If we previously turned the call into a direct call, there is
2357 no need to analyze. */
2358 if (cs && !cs->indirect_unknown_callee)
2359 return;
2360
2361 if (cs->indirect_info->polymorphic)
2362 {
2363 tree instance;
2364 tree target = gimple_call_fn (call);
2365 ipa_polymorphic_call_context context (current_function_decl,
2366 target, call, &instance);
2367
2368 gcc_checking_assert (cs->indirect_info->otr_type
2369 == obj_type_ref_class (target));
2370 gcc_checking_assert (cs->indirect_info->otr_token
2371 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2372
2373 if (context.get_dynamic_type (instance,
2374 OBJ_TYPE_REF_OBJECT (target),
2375 obj_type_ref_class (target), call))
2376 cs->indirect_info->context = context;
2377 }
2378
2379 if (TREE_CODE (target) == SSA_NAME)
2380 ipa_analyze_indirect_call_uses (fbi, call, target);
2381 else if (virtual_method_call_p (target))
2382 ipa_analyze_virtual_call_uses (fbi, call, target);
2383 }
2384
2385
2386 /* Analyze the call statement STMT with respect to formal parameters (described
2387 in FBI->info) of the caller given by FBI->node. Currently it only checks
2388 whether formal parameters are called. */
2389
2390 static void
2391 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2392 {
2393 if (is_gimple_call (stmt))
2394 ipa_analyze_call_uses (fbi, stmt);
2395 }
2396
2397 /* Callback of walk_stmt_load_store_addr_ops for the load, store and address visitors.
2398 If OP is a parameter declaration, mark it as used in the info structure
2399 passed in DATA. */
2400
2401 static bool
2402 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2403 {
2404 struct ipa_node_params *info = (struct ipa_node_params *) data;
2405
2406 op = get_base_address (op);
2407 if (op
2408 && TREE_CODE (op) == PARM_DECL)
2409 {
2410 int index = ipa_get_param_decl_index (info, op);
2411 gcc_assert (index >= 0);
2412 ipa_set_param_used (info, index, true);
2413 }
2414
2415 return false;
2416 }
2417
2418 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2419 the findings in various structures of the associated ipa_node_params
2420 structure, such as parameter flags, notes etc. FBI holds various data about
2421 the function being analyzed. */
2422
2423 static void
2424 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2425 {
2426 gimple_stmt_iterator gsi;
2427 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2428 {
2429 gimple stmt = gsi_stmt (gsi);
2430
2431 if (is_gimple_debug (stmt))
2432 continue;
2433
2434 ipa_analyze_stmt_uses (fbi, stmt);
2435 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2436 visit_ref_for_mod_analysis,
2437 visit_ref_for_mod_analysis,
2438 visit_ref_for_mod_analysis);
2439 }
2440 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2441 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2442 visit_ref_for_mod_analysis,
2443 visit_ref_for_mod_analysis,
2444 visit_ref_for_mod_analysis);
2445 }
2446
2447 /* Calculate controlled uses of parameters of NODE. */
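
/* As an illustration (hypothetical): if an SSA parameter is used only as an
   argument of two calls, its count of controlled uses is 2; any other
   non-debug use, such as a dereference of the pointer or a store of it into
   memory, makes the count IPA_UNDESCRIBED_USE.  */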
2448
2449 static void
2450 ipa_analyze_controlled_uses (struct cgraph_node *node)
2451 {
2452 struct ipa_node_params *info = IPA_NODE_REF (node);
2453
2454 for (int i = 0; i < ipa_get_param_count (info); i++)
2455 {
2456 tree parm = ipa_get_param (info, i);
2457 int controlled_uses = 0;
2458
2459 /* For SSA regs see if parameter is used. For non-SSA we compute
2460 the flag during modification analysis. */
2461 if (is_gimple_reg (parm))
2462 {
2463 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2464 parm);
2465 if (ddef && !has_zero_uses (ddef))
2466 {
2467 imm_use_iterator imm_iter;
2468 use_operand_p use_p;
2469
2470 ipa_set_param_used (info, i, true);
2471 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2472 if (!is_gimple_call (USE_STMT (use_p)))
2473 {
2474 if (!is_gimple_debug (USE_STMT (use_p)))
2475 {
2476 controlled_uses = IPA_UNDESCRIBED_USE;
2477 break;
2478 }
2479 }
2480 else
2481 controlled_uses++;
2482 }
2483 else
2484 controlled_uses = 0;
2485 }
2486 else
2487 controlled_uses = IPA_UNDESCRIBED_USE;
2488 ipa_set_controlled_uses (info, i, controlled_uses);
2489 }
2490 }
2491
2492 /* Free stuff in BI. */
2493
2494 static void
2495 free_ipa_bb_info (struct ipa_bb_info *bi)
2496 {
2497 bi->cg_edges.release ();
2498 bi->param_aa_statuses.release ();
2499 }
2500
2501 /* Dominator walker driving the analysis. */
2502
2503 class analysis_dom_walker : public dom_walker
2504 {
2505 public:
2506 analysis_dom_walker (struct func_body_info *fbi)
2507 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2508
2509 virtual void before_dom_children (basic_block);
2510
2511 private:
2512 struct func_body_info *m_fbi;
2513 };
2514
2515 void
2516 analysis_dom_walker::before_dom_children (basic_block bb)
2517 {
2518 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2519 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2520 }
2521
2522 /* Initialize the array describing properties of formal parameters
2523 of NODE, analyze their uses and compute jump functions associated
2524 with actual arguments of calls from within NODE. */
2525
2526 void
2527 ipa_analyze_node (struct cgraph_node *node)
2528 {
2529 struct func_body_info fbi;
2530 struct ipa_node_params *info;
2531
2532 ipa_check_create_node_params ();
2533 ipa_check_create_edge_args ();
2534 info = IPA_NODE_REF (node);
2535
2536 if (info->analysis_done)
2537 return;
2538 info->analysis_done = 1;
2539
2540 if (ipa_func_spec_opts_forbid_analysis_p (node))
2541 {
2542 for (int i = 0; i < ipa_get_param_count (info); i++)
2543 {
2544 ipa_set_param_used (info, i, true);
2545 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2546 }
2547 return;
2548 }
2549
2550 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2551 push_cfun (func);
2552 calculate_dominance_info (CDI_DOMINATORS);
2553 ipa_initialize_node_params (node);
2554 ipa_analyze_controlled_uses (node);
2555
2556 fbi.node = node;
2557 fbi.info = IPA_NODE_REF (node);
2558 fbi.bb_infos = vNULL;
2559 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2560 fbi.param_count = ipa_get_param_count (info);
2561 fbi.aa_walked = 0;
2562
2563 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2564 {
2565 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2566 bi->cg_edges.safe_push (cs);
2567 }
2568
2569 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2570 {
2571 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2572 bi->cg_edges.safe_push (cs);
2573 }
2574
2575 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2576
2577 int i;
2578 struct ipa_bb_info *bi;
2579 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2580 free_ipa_bb_info (bi);
2581 fbi.bb_infos.release ();
2582 free_dominance_info (CDI_DOMINATORS);
2583 pop_cfun ();
2584 }
2585
2586 /* Update the jump function DST when the call graph edge corresponding to SRC
2587 is being inlined, knowing that DST is of type ancestor and SRC of known
2588 type. */
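
/* A worked example with made-up numbers: if SRC says the argument is a known
   object of type B at offset 0 and DST is an ancestor jump function with
   offset 64 and type A, the combination describes a known object with base
   type B whose component of type A resides at offset 64, provided the
   ancestor function preserves the type.  */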
2589
2590 static void
2591 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2592 struct ipa_jump_func *dst)
2593 {
2594 HOST_WIDE_INT combined_offset;
2595 tree combined_type;
2596
2597 if (!ipa_get_jf_ancestor_type_preserved (dst))
2598 {
2599 dst->type = IPA_JF_UNKNOWN;
2600 return;
2601 }
2602
2603 combined_offset = ipa_get_jf_known_type_offset (src)
2604 + ipa_get_jf_ancestor_offset (dst);
2605 combined_type = ipa_get_jf_ancestor_type (dst);
2606
2607 ipa_set_jf_known_type (dst, combined_offset,
2608 ipa_get_jf_known_type_base_type (src),
2609 combined_type);
2610 }
2611
2612 /* Update the jump functions associated with call graph edge E when the call
2613 graph edge CS is being inlined, assuming that E->caller is already (possibly
2614 indirectly) inlined into CS->callee and that E has not been inlined. */
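
/* For instance (a sketch): if E passes along the inlined function's parameter
   2 as a simple pass-through and CS passes a constant in that position, the
   pass-through on E is replaced by a copy of the constant; if CS passes its
   own parameter instead, the formal_id on E is rewritten to refer to the
   caller's parameter and the preserved flags of both jump functions are
   anded together.  */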
2615
2616 static void
2617 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2618 struct cgraph_edge *e)
2619 {
2620 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2621 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2622 int count = ipa_get_cs_argument_count (args);
2623 int i;
2624
2625 for (i = 0; i < count; i++)
2626 {
2627 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2628 struct ipa_polymorphic_call_context *dst_ctx
2629 = ipa_get_ith_polymorhic_call_context (args, i);
2630
2631 if (dst->type == IPA_JF_ANCESTOR)
2632 {
2633 struct ipa_jump_func *src;
2634 int dst_fid = dst->value.ancestor.formal_id;
2635 struct ipa_polymorphic_call_context *src_ctx
2636 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2637
2638 /* Variable number of arguments can cause havoc if we try to access
2639 one that does not exist in the inlined edge. So make sure we
2640 don't. */
2641 if (dst_fid >= ipa_get_cs_argument_count (top))
2642 {
2643 dst->type = IPA_JF_UNKNOWN;
2644 continue;
2645 }
2646
2647 src = ipa_get_ith_jump_func (top, dst_fid);
2648
2649 if (src_ctx && !src_ctx->useless_p ())
2650 {
2651 struct ipa_polymorphic_call_context ctx = *src_ctx;
2652
2653 /* TODO: Make type preserved safe WRT contexts. */
2654 if (!dst->value.ancestor.agg_preserved)
2655 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2656 ctx.offset_by (dst->value.ancestor.offset);
2657 if (!ctx.useless_p ())
2658 {
2659 if (!dst_ctx)
2660 {
2661 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2662 count);
2663 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2664 }
2665 dst_ctx->combine_with (ctx);
2666 }
2663 }
2664
2665 if (src->agg.items
2666 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2667 {
2668 struct ipa_agg_jf_item *item;
2669 int j;
2670
2671 /* Currently we do not produce clobber aggregate jump functions,
2672 replace with merging when we do. */
2673 gcc_assert (!dst->agg.items);
2674
2675 dst->agg.items = vec_safe_copy (src->agg.items);
2676 dst->agg.by_ref = src->agg.by_ref;
2677 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2678 item->offset -= dst->value.ancestor.offset;
2679 }
2680
2681 if (src->type == IPA_JF_KNOWN_TYPE)
2682 combine_known_type_and_ancestor_jfs (src, dst);
2683 else if (src->type == IPA_JF_PASS_THROUGH
2684 && src->value.pass_through.operation == NOP_EXPR)
2685 {
2686 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2687 dst->value.ancestor.agg_preserved &=
2688 src->value.pass_through.agg_preserved;
2689 dst->value.ancestor.type_preserved &=
2690 src->value.pass_through.type_preserved;
2691 }
2692 else if (src->type == IPA_JF_ANCESTOR)
2693 {
2694 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2695 dst->value.ancestor.offset += src->value.ancestor.offset;
2696 dst->value.ancestor.agg_preserved &=
2697 src->value.ancestor.agg_preserved;
2698 dst->value.ancestor.type_preserved &=
2699 src->value.ancestor.type_preserved;
2700 }
2701 else
2702 dst->type = IPA_JF_UNKNOWN;
2703 }
2704 else if (dst->type == IPA_JF_PASS_THROUGH)
2705 {
2706 struct ipa_jump_func *src;
2707 /* We must check the range because of calls with a variable number of
2708 arguments, and we cannot combine jump functions with operations. */
2709 if (dst->value.pass_through.operation == NOP_EXPR
2710 && (dst->value.pass_through.formal_id
2711 < ipa_get_cs_argument_count (top)))
2712 {
2713 int dst_fid = dst->value.pass_through.formal_id;
2714 src = ipa_get_ith_jump_func (top, dst_fid);
2715 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2716 struct ipa_polymorphic_call_context *src_ctx
2717 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2718
2719 if (src_ctx && !src_ctx->useless_p ())
2720 {
2721 struct ipa_polymorphic_call_context ctx = *src_ctx;
2722
2723 /* TODO: Make type preserved safe WRT contexts. */
2724 if (!dst->value.ancestor.agg_preserved)
2725 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2726 if (!ctx.useless_p ())
2727 {
2728 if (!dst_ctx)
2729 {
2730 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2731 count);
2732 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2733 }
2734 dst_ctx->combine_with (ctx);
2735 }
2736 }
2737 switch (src->type)
2738 {
2739 case IPA_JF_UNKNOWN:
2740 dst->type = IPA_JF_UNKNOWN;
2741 break;
2742 case IPA_JF_KNOWN_TYPE:
2743 if (ipa_get_jf_pass_through_type_preserved (dst))
2744 ipa_set_jf_known_type (dst,
2745 ipa_get_jf_known_type_offset (src),
2746 ipa_get_jf_known_type_base_type (src),
2747 ipa_get_jf_known_type_component_type (src));
2748 else
2749 dst->type = IPA_JF_UNKNOWN;
2750 break;
2751 case IPA_JF_CONST:
2752 ipa_set_jf_cst_copy (dst, src);
2753 break;
2754
2755 case IPA_JF_PASS_THROUGH:
2756 {
2757 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2758 enum tree_code operation;
2759 operation = ipa_get_jf_pass_through_operation (src);
2760
2761 if (operation == NOP_EXPR)
2762 {
2763 bool agg_p, type_p;
2764 agg_p = dst_agg_p
2765 && ipa_get_jf_pass_through_agg_preserved (src);
2766 type_p = ipa_get_jf_pass_through_type_preserved (src)
2767 && ipa_get_jf_pass_through_type_preserved (dst);
2768 ipa_set_jf_simple_pass_through (dst, formal_id,
2769 agg_p, type_p);
2770 }
2771 else
2772 {
2773 tree operand = ipa_get_jf_pass_through_operand (src);
2774 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2775 operation);
2776 }
2777 break;
2778 }
2779 case IPA_JF_ANCESTOR:
2780 {
2781 bool agg_p, type_p;
2782 agg_p = dst_agg_p
2783 && ipa_get_jf_ancestor_agg_preserved (src);
2784 type_p = ipa_get_jf_ancestor_type_preserved (src)
2785 && ipa_get_jf_pass_through_type_preserved (dst);
2786 ipa_set_ancestor_jf (dst,
2787 ipa_get_jf_ancestor_offset (src),
2788 ipa_get_jf_ancestor_type (src),
2789 ipa_get_jf_ancestor_formal_id (src),
2790 agg_p, type_p);
2791 break;
2792 }
2793 default:
2794 gcc_unreachable ();
2795 }
2796
2797 if (src->agg.items
2798 && (dst_agg_p || !src->agg.by_ref))
2799 {
2800 /* Currently we do not produce clobber aggregate jump
2801 functions, replace with merging when we do. */
2802 gcc_assert (!dst->agg.items);
2803
2804 dst->agg.by_ref = src->agg.by_ref;
2805 dst->agg.items = vec_safe_copy (src->agg.items);
2806 }
2807 }
2808 else
2809 dst->type = IPA_JF_UNKNOWN;
2810 }
2811 }
2812 }
2813
2814 /* If TARGET is an addr_expr of a function declaration, make it the
2815 (speculative, when SPECULATIVE is set) destination of an indirect edge IE and return the edge.
2816 Otherwise, return NULL. */
2817
2818 struct cgraph_edge *
2819 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2820 bool speculative)
2821 {
2822 struct cgraph_node *callee;
2823 struct inline_edge_summary *es = inline_edge_summary (ie);
2824 bool unreachable = false;
2825
2826 if (TREE_CODE (target) == ADDR_EXPR)
2827 target = TREE_OPERAND (target, 0);
2828 if (TREE_CODE (target) != FUNCTION_DECL)
2829 {
2830 target = canonicalize_constructor_val (target, NULL);
2831 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2832 {
2833 if (ie->indirect_info->member_ptr)
2834 /* Member pointer call that goes through a VMT lookup. */
2835 return NULL;
2836
2837 if (dump_enabled_p ())
2838 {
2839 location_t loc = gimple_location_safe (ie->call_stmt);
2840 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2841 "discovered direct call to non-function in %s/%i, "
2842 "making it __builtin_unreachable\n",
2843 ie->caller->name (), ie->caller->order);
2844 }
2845
2846 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2847 callee = cgraph_node::get_create (target);
2848 unreachable = true;
2849 }
2850 else
2851 callee = cgraph_node::get (target);
2852 }
2853 else
2854 callee = cgraph_node::get (target);
2855
2856 /* Because may-edges are not explicitly represented and the vtable may be external,
2857 we may create the first reference to the object in the unit. */
2858 if (!callee || callee->global.inlined_to)
2859 {
2860
2861 /* It is better to ensure we can refer to it.
2862 In the case of static functions we are out of luck, since we have already
2863 removed the body. In the case of public functions we may or may
2864 not introduce the reference. */
2865 if (!canonicalize_constructor_val (target, NULL)
2866 || !TREE_PUBLIC (target))
2867 {
2868 if (dump_file)
2869 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2870 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2871 xstrdup (ie->caller->name ()),
2872 ie->caller->order,
2873 xstrdup (ie->callee->name ()),
2874 ie->callee->order);
2875 return NULL;
2876 }
2877 callee = cgraph_node::get_create (target);
2878 }
2879
2880 if (!dbg_cnt (devirt))
2881 return NULL;
2882
2883 ipa_check_create_node_params ();
2884
2885 /* We cannot make edges to inline clones. It is a bug that someone removed
2886 the cgraph node too early. */
2887 gcc_assert (!callee->global.inlined_to);
2888
2889 if (dump_file && !unreachable)
2890 {
2891 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2892 "(%s/%i -> %s/%i), for stmt ",
2893 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2894 speculative ? "speculative" : "known",
2895 xstrdup (ie->caller->name ()),
2896 ie->caller->order,
2897 xstrdup (callee->name ()),
2898 callee->order);
2899 if (ie->call_stmt)
2900 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2901 else
2902 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2903 }
2904 if (dump_enabled_p ())
2905 {
2906 location_t loc = gimple_location_safe (ie->call_stmt);
2907
2908 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2909 "converting indirect call in %s to direct call to %s\n",
2910 ie->caller->name (), callee->name ());
2911 }
2912 if (!speculative)
2913 ie = ie->make_direct (callee);
2914 else
2915 {
2916 if (!callee->can_be_discarded_p ())
2917 {
2918 cgraph_node *alias;
2919 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2920 if (alias)
2921 callee = alias;
2922 }
2923 ie = ie->make_speculative
2924 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2925 }
2926 es = inline_edge_summary (ie);
2927 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2928 - eni_size_weights.call_cost);
2929 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2930 - eni_time_weights.call_cost);
2931
2932 return ie;
2933 }
2934
2935 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2936 return NULL if there is none. BY_REF specifies whether the value has to
2937 be passed by reference or by value. */
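
/* E.g. (hypothetical): for an aggregate jump function that records the
   constant 2 at offset 32, looking up offset 32 with a matching BY_REF yields
   that constant, while a different offset or a mismatch between BY_REF and
   AGG->by_ref yields NULL.  */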
2938
2939 tree
2940 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2941 HOST_WIDE_INT offset, bool by_ref)
2942 {
2943 struct ipa_agg_jf_item *item;
2944 int i;
2945
2946 if (by_ref != agg->by_ref)
2947 return NULL;
2948
2949 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2950 if (item->offset == offset)
2951 {
2952 /* Currently we do not have clobber values, return NULL for them once
2953 we do. */
2954 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2955 return item->value;
2956 }
2957 return NULL;
2958 }
2959
2960 /* Remove a reference to SYMBOL from the list of references of a node given by
2961 reference description RDESC. Return true if the reference has been
2962 successfully found and removed. */
2963
2964 static bool
2965 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2966 {
2967 struct ipa_ref *to_del;
2968 struct cgraph_edge *origin;
2969
2970 origin = rdesc->cs;
2971 if (!origin)
2972 return false;
2973 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2974 origin->lto_stmt_uid);
2975 if (!to_del)
2976 return false;
2977
2978 to_del->remove_reference ();
2979 if (dump_file)
2980 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2981 xstrdup (origin->caller->name ()),
2982 origin->caller->order, xstrdup (symbol->name ()));
2983 return true;
2984 }
2985
2986 /* If JFUNC has a reference description with refcount different from
2987 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2988 NULL. JFUNC must be a constant jump function. */
2989
2990 static struct ipa_cst_ref_desc *
2991 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2992 {
2993 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2994 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2995 return rdesc;
2996 else
2997 return NULL;
2998 }
2999
3000 /* If the value of constant jump function JFUNC is an address of a function
3001 declaration, return the associated call graph node. Otherwise return
3002 NULL. */
3003
3004 static cgraph_node *
3005 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3006 {
3007 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3008 tree cst = ipa_get_jf_constant (jfunc);
3009 if (TREE_CODE (cst) != ADDR_EXPR
3010 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3011 return NULL;
3012
3013 return cgraph_node::get (TREE_OPERAND (cst, 0));
3014 }
3015
3016
3017 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3018 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3019 the edge specified in the rdesc. Return false if either the symbol or the
3020 reference could not be found, otherwise return true. */
3021
3022 static bool
3023 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3024 {
3025 struct ipa_cst_ref_desc *rdesc;
3026 if (jfunc->type == IPA_JF_CONST
3027 && (rdesc = jfunc_rdesc_usable (jfunc))
3028 && --rdesc->refcount == 0)
3029 {
3030 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3031 if (!symbol)
3032 return false;
3033
3034 return remove_described_reference (symbol, rdesc);
3035 }
3036 return true;
3037 }
3038
3039 /* Try to find a destination for indirect edge IE that corresponds to a simple
3040 call or a call of a member function pointer and where the destination is a
3041 pointer formal parameter described by jump function JFUNC. If it can be
3042 determined, return the newly direct edge, otherwise return NULL.
3043 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3044
3045 static struct cgraph_edge *
3046 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3047 struct ipa_jump_func *jfunc,
3048 struct ipa_node_params *new_root_info)
3049 {
3050 struct cgraph_edge *cs;
3051 tree target;
3052 bool agg_contents = ie->indirect_info->agg_contents;
3053
3054 if (ie->indirect_info->agg_contents)
3055 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3056 ie->indirect_info->offset,
3057 ie->indirect_info->by_ref);
3058 else
3059 target = ipa_value_from_jfunc (new_root_info, jfunc);
3060 if (!target)
3061 return NULL;
3062 cs = ipa_make_edge_direct_to_target (ie, target);
3063
3064 if (cs && !agg_contents)
3065 {
3066 bool ok;
3067 gcc_checking_assert (cs->callee
3068 && (cs != ie
3069 || jfunc->type != IPA_JF_CONST
3070 || !cgraph_node_for_jfunc (jfunc)
3071 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3072 ok = try_decrement_rdesc_refcount (jfunc);
3073 gcc_checking_assert (ok);
3074 }
3075
3076 return cs;
3077 }
3078
3079 /* Return the target to be used in cases of impossible devirtualization. IE
3080 and target (the latter can be NULL) are dumped when dumping is enabled. */
3081
3082 tree
3083 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3084 {
3085 if (dump_file)
3086 {
3087 if (target)
3088 fprintf (dump_file,
3089 "Type inconsistent devirtualization: %s/%i->%s\n",
3090 ie->caller->name (), ie->caller->order,
3091 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3092 else
3093 fprintf (dump_file,
3094 "No devirtualization target in %s/%i\n",
3095 ie->caller->name (), ie->caller->order);
3096 }
3097 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3098 cgraph_node::get_create (new_target);
3099 return new_target;
3100 }
3101
3102 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3103 call based on a formal parameter which is described by jump function JFUNC
3104 and if it can be determined, make it direct and return the direct edge.
3105 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3106 are relative to. */
3107
3108 static struct cgraph_edge *
3109 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3110 struct ipa_jump_func *jfunc,
3111 struct ipa_node_params *new_root_info,
3112 struct ipa_polymorphic_call_context *ctx_ptr)
3113 {
3114 tree binfo, target = NULL;
3115 bool speculative = false;
3116 bool updated = false;
3117
3118 if (!flag_devirtualize)
3119 return NULL;
3120
3121 /* If this is a call via a function parameter, restrict its type
3122 based on knowledge of the context. */
3123 if (ctx_ptr && !ie->indirect_info->by_ref)
3124 {
3125 struct ipa_polymorphic_call_context ctx = *ctx_ptr;
3126
3127 ctx.offset_by (ie->indirect_info->offset);
3128
3129 /* TODO: We want to record if type change happens.
3130 Old code did not do that, which seems like a bug. */
3131 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3132 ie->indirect_info->otr_type);
3133
3134 updated = ie->indirect_info->context.combine_with
3135 (ctx, ie->indirect_info->otr_type);
3136 }
3137
3138 /* Try to do lookup via known virtual table pointer value. */
3139 if (!ie->indirect_info->by_ref)
3140 {
3141 tree vtable;
3142 unsigned HOST_WIDE_INT offset;
3143 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3144 ie->indirect_info->offset,
3145 true);
3146 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3147 {
3148 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3149 vtable, offset);
3150 if (target)
3151 {
3152 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
3153 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
3154 || !possible_polymorphic_call_target_p
3155 (ie, cgraph_node::get (target)))
3156 target = ipa_impossible_devirt_target (ie, target);
3157 return ipa_make_edge_direct_to_target (ie, target);
3158 }
3159 }
3160 }
3161
3162 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3163
3164 if (binfo && TREE_CODE (binfo) != TREE_BINFO)
3165 {
3166 struct ipa_polymorphic_call_context ctx (binfo,
3167 ie->indirect_info->otr_type,
3168 ie->indirect_info->offset);
3169 updated |= ie->indirect_info->context.combine_with
3170 (ctx, ie->indirect_info->otr_type);
3171 }
3172
3173 if (updated)
3174 {
3175 ipa_polymorphic_call_context context (ie);
3176 vec <cgraph_node *>targets;
3177 bool final;
3178
3179 targets = possible_polymorphic_call_targets
3180 (ie->indirect_info->otr_type,
3181 ie->indirect_info->otr_token,
3182 context, &final);
3183 if (final && targets.length () <= 1)
3184 {
3185 if (targets.length () == 1)
3186 target = targets[0]->decl;
3187 else
3188 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3189 }
3190 else if (flag_devirtualize_speculatively
3191 && !ie->speculative && ie->maybe_hot_p ())
3192 {
3193 cgraph_node *n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3194 ie->indirect_info->otr_token,
3195 ie->indirect_info->context);
3196 if (n)
3197 {
3198 target = n->decl;
3199 speculative = true;
3200 }
3201 }
3202 }
3203
3204 if (binfo && TREE_CODE (binfo) == TREE_BINFO)
3205 {
3206 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3207 ie->indirect_info->otr_type);
3208 if (binfo)
3209 {
3210 tree t = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3211 binfo);
3212 if (t)
3213 {
3214 gcc_assert (!target || speculative || target == t);
3215 target = t;
3216 speculative = false;
3217 }
3218 }
3219 }
3220
3221 if (target)
3222 {
3223 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get_create (target)))
3224 target = ipa_impossible_devirt_target (ie, target);
3225 return ipa_make_edge_direct_to_target (ie, target, speculative);
3226 }
3227 else
3228 return NULL;
3229 }
3230
3231 /* Update the param called notes associated with NODE when CS is being inlined,
3232 assuming NODE is (potentially indirectly) inlined into CS->callee.
3233 Moreover, if the callee is discovered to be constant, create a new cgraph
3234 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3235 unless NEW_EDGES is NULL. Return true iff new edges were created. */
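
/* A sketch of the two interesting cases (hypothetical indices): if an
   indirect call in NODE goes through NODE's parameter 1 and the jump function
   on CS for that position is the address of a known function, the edge is
   made direct; if it is a simple pass-through of the caller's parameter 3,
   the note is merely retargeted to parameter 3 so that a later inlining may
   still resolve it.  */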
3236
3237 static bool
3238 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3239 struct cgraph_node *node,
3240 vec<cgraph_edge *> *new_edges)
3241 {
3242 struct ipa_edge_args *top;
3243 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3244 struct ipa_node_params *new_root_info;
3245 bool res = false;
3246
3247 ipa_check_create_edge_args ();
3248 top = IPA_EDGE_REF (cs);
3249 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3250 ? cs->caller->global.inlined_to
3251 : cs->caller);
3252
3253 for (ie = node->indirect_calls; ie; ie = next_ie)
3254 {
3255 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3256 struct ipa_jump_func *jfunc;
3257 int param_index;
3258
3259 next_ie = ie->next_callee;
3260
3261 if (ici->param_index == -1)
3262 continue;
3263
3264 /* We must check the range because of calls with a variable number of arguments: */
3265 if (ici->param_index >= ipa_get_cs_argument_count (top))
3266 {
3267 ici->param_index = -1;
3268 continue;
3269 }
3270
3271 param_index = ici->param_index;
3272 jfunc = ipa_get_ith_jump_func (top, param_index);
3273
3274 if (!flag_indirect_inlining)
3275 new_direct_edge = NULL;
3276 else if (ici->polymorphic)
3277 {
3278 ipa_polymorphic_call_context *ctx;
3279 ctx = ipa_get_ith_polymorhic_call_context (top, param_index);
3280 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3281 new_root_info,
3282 ctx);
3283 }
3284 else
3285 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3286 new_root_info);
3287 /* If speculation was removed, then we need to do nothing. */
3288 if (new_direct_edge && new_direct_edge != ie)
3289 {
3290 new_direct_edge->indirect_inlining_edge = 1;
3291 top = IPA_EDGE_REF (cs);
3292 res = true;
3293 }
3294 else if (new_direct_edge)
3295 {
3296 new_direct_edge->indirect_inlining_edge = 1;
3297 if (new_direct_edge->call_stmt)
3298 new_direct_edge->call_stmt_cannot_inline_p
3299 = !gimple_check_call_matching_types (
3300 new_direct_edge->call_stmt,
3301 new_direct_edge->callee->decl, false);
3302 if (new_edges)
3303 {
3304 new_edges->safe_push (new_direct_edge);
3305 res = true;
3306 }
3307 top = IPA_EDGE_REF (cs);
3308 }
3309 else if (jfunc->type == IPA_JF_PASS_THROUGH
3310 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3311 {
3312 if ((ici->agg_contents
3313 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3314 || (ici->polymorphic
3315 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3316 ici->param_index = -1;
3317 else
3318 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3319 }
3320 else if (jfunc->type == IPA_JF_ANCESTOR)
3321 {
3322 if ((ici->agg_contents
3323 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3324 || (ici->polymorphic
3325 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3326 ici->param_index = -1;
3327 else
3328 {
3329 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3330 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3331 }
3332 }
3333 else
3334 /* Either we can find a destination for this edge now or never. */
3335 ici->param_index = -1;
3336 }
3337
3338 return res;
3339 }
3340
3341 /* Recursively traverse subtree of NODE (including node) made of inlined
3342 cgraph_edges when CS has been inlined and invoke
3343 update_indirect_edges_after_inlining on all nodes and
3344 update_jump_functions_after_inlining on all non-inlined edges that lead out
3345 of this subtree. Newly discovered indirect edges will be added to
3346 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3347 created. */
3348
3349 static bool
3350 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3351 struct cgraph_node *node,
3352 vec<cgraph_edge *> *new_edges)
3353 {
3354 struct cgraph_edge *e;
3355 bool res;
3356
3357 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3358
3359 for (e = node->callees; e; e = e->next_callee)
3360 if (!e->inline_failed)
3361 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3362 else
3363 update_jump_functions_after_inlining (cs, e);
3364 for (e = node->indirect_calls; e; e = e->next_callee)
3365 update_jump_functions_after_inlining (cs, e);
3366
3367 return res;
3368 }
3369
3370 /* Combine two controlled uses counts as done during inlining. */
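
/* For example: combining a count of 3 in the new root with a count of 2 in
   the inlined callee gives 3 + 2 - 1 = 4, because the inlined call edge
   itself accounted for one of the root's uses; IPA_UNDESCRIBED_USE in either
   operand makes the result IPA_UNDESCRIBED_USE.  */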
3371
3372 static int
3373 combine_controlled_uses_counters (int c, int d)
3374 {
3375 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3376 return IPA_UNDESCRIBED_USE;
3377 else
3378 return c + d - 1;
3379 }
3380
3381 /* Propagate the number of controlled users from CS->callee to the new root of the
3382 tree of inlined nodes. */
3383
3384 static void
3385 propagate_controlled_uses (struct cgraph_edge *cs)
3386 {
3387 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3388 struct cgraph_node *new_root = cs->caller->global.inlined_to
3389 ? cs->caller->global.inlined_to : cs->caller;
3390 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3391 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3392 int count, i;
3393
3394 count = MIN (ipa_get_cs_argument_count (args),
3395 ipa_get_param_count (old_root_info));
3396 for (i = 0; i < count; i++)
3397 {
3398 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3399 struct ipa_cst_ref_desc *rdesc;
3400
3401 if (jf->type == IPA_JF_PASS_THROUGH)
3402 {
3403 int src_idx, c, d;
3404 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3405 c = ipa_get_controlled_uses (new_root_info, src_idx);
3406 d = ipa_get_controlled_uses (old_root_info, i);
3407
3408 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3409 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3410 c = combine_controlled_uses_counters (c, d);
3411 ipa_set_controlled_uses (new_root_info, src_idx, c);
3412 if (c == 0 && new_root_info->ipcp_orig_node)
3413 {
3414 struct cgraph_node *n;
3415 struct ipa_ref *ref;
3416 tree t = new_root_info->known_vals[src_idx];
3417
3418 if (t && TREE_CODE (t) == ADDR_EXPR
3419 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3420 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3421 && (ref = new_root->find_reference (n, NULL, 0)))
3422 {
3423 if (dump_file)
3424 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3425 "reference from %s/%i to %s/%i.\n",
3426 xstrdup (new_root->name ()),
3427 new_root->order,
3428 xstrdup (n->name ()), n->order);
3429 ref->remove_reference ();
3430 }
3431 }
3432 }
3433 else if (jf->type == IPA_JF_CONST
3434 && (rdesc = jfunc_rdesc_usable (jf)))
3435 {
3436 int d = ipa_get_controlled_uses (old_root_info, i);
3437 int c = rdesc->refcount;
3438 rdesc->refcount = combine_controlled_uses_counters (c, d);
3439 if (rdesc->refcount == 0)
3440 {
3441 tree cst = ipa_get_jf_constant (jf);
3442 struct cgraph_node *n;
3443 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3444 && TREE_CODE (TREE_OPERAND (cst, 0))
3445 == FUNCTION_DECL);
3446 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3447 if (n)
3448 {
3449 struct cgraph_node *clone;
3450 bool ok;
3451 ok = remove_described_reference (n, rdesc);
3452 gcc_checking_assert (ok);
3453
3454 clone = cs->caller;
3455 while (clone->global.inlined_to
3456 && clone != rdesc->cs->caller
3457 && IPA_NODE_REF (clone)->ipcp_orig_node)
3458 {
3459 struct ipa_ref *ref;
3460 ref = clone->find_reference (n, NULL, 0);
3461 if (ref)
3462 {
3463 if (dump_file)
3464 fprintf (dump_file, "ipa-prop: Removing "
3465 "cloning-created reference "
3466 "from %s/%i to %s/%i.\n",
3467 xstrdup (clone->name ()),
3468 clone->order,
3469 xstrdup (n->name ()),
3470 n->order);
3471 ref->remove_reference ();
3472 }
3473 clone = clone->callers->caller;
3474 }
3475 }
3476 }
3477 }
3478 }
3479
3480 for (i = ipa_get_param_count (old_root_info);
3481 i < ipa_get_cs_argument_count (args);
3482 i++)
3483 {
3484 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3485
3486 if (jf->type == IPA_JF_CONST)
3487 {
3488 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3489 if (rdesc)
3490 rdesc->refcount = IPA_UNDESCRIBED_USE;
3491 }
3492 else if (jf->type == IPA_JF_PASS_THROUGH)
3493 ipa_set_controlled_uses (new_root_info,
3494 jf->value.pass_through.formal_id,
3495 IPA_UNDESCRIBED_USE);
3496 }
3497 }
3498
3499 /* Update jump functions and call note functions on inlining the call site CS.
3500 CS is expected to lead to a node already cloned by
3501 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3502 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3503 created. */
3504
3505 bool
3506 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3507 vec<cgraph_edge *> *new_edges)
3508 {
3509 bool changed;
3510 /* Do nothing if the preparation phase has not been carried out yet
3511 (i.e. during early inlining). */
3512 if (!ipa_node_params_vector.exists ())
3513 return false;
3514 gcc_assert (ipa_edge_args_vector);
3515
3516 propagate_controlled_uses (cs);
3517 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3518
3519 return changed;
3520 }
3521
3522 /* Frees all dynamically allocated structures that the argument info points
3523 to. */
3524
3525 void
3526 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3527 {
3528 vec_free (args->jump_functions);
3529 memset (args, 0, sizeof (*args));
3530 }
3531
3532 /* Free all ipa_edge structures. */
3533
3534 void
3535 ipa_free_all_edge_args (void)
3536 {
3537 int i;
3538 struct ipa_edge_args *args;
3539
3540 if (!ipa_edge_args_vector)
3541 return;
3542
3543 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3544 ipa_free_edge_args_substructures (args);
3545
3546 vec_free (ipa_edge_args_vector);
3547 }
3548
3549 /* Frees all dynamically allocated structures that the param info points
3550 to. */
3551
3552 void
3553 ipa_free_node_params_substructures (struct ipa_node_params *info)
3554 {
3555 info->descriptors.release ();
3556 free (info->lattices);
3557 /* Lattice values and their sources are deallocated with their allocation
3558 pool. */
3559 info->known_vals.release ();
3560 memset (info, 0, sizeof (*info));
3561 }
3562
3563 /* Free all ipa_node_params structures. */
3564
3565 void
3566 ipa_free_all_node_params (void)
3567 {
3568 int i;
3569 struct ipa_node_params *info;
3570
3571 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3572 ipa_free_node_params_substructures (info);
3573
3574 ipa_node_params_vector.release ();
3575 }
3576
3577 /* Set the aggregate replacements of NODE to be AGGVALS. */
3578
3579 void
3580 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3581 struct ipa_agg_replacement_value *aggvals)
3582 {
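/* Note: the replacement vector is indexed by node->uid, so it is grown
   (cleared) below to cover the largest uid currently in the call graph
   before the slot for NODE is written.  */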
3583 if (vec_safe_length (ipa_node_agg_replacements)
3584 <= (unsigned) symtab->cgraph_max_uid)
3585 vec_safe_grow_cleared (ipa_node_agg_replacements,
3586 symtab->cgraph_max_uid + 1);
3587
3588 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3589 }
3590
3591 /* Hook that is called by cgraph.c when an edge is removed. */
3592
3593 static void
3594 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3595 {
3596 struct ipa_edge_args *args;
3597
3598 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3599 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3600 return;
3601
3602 args = IPA_EDGE_REF (cs);
3603 if (args->jump_functions)
3604 {
3605 struct ipa_jump_func *jf;
3606 int i;
3607 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3608 {
3609 struct ipa_cst_ref_desc *rdesc;
3610 try_decrement_rdesc_refcount (jf);
3611 if (jf->type == IPA_JF_CONST
3612 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3613 && rdesc->cs == cs)
3614 rdesc->cs = NULL;
3615 }
3616 }
3617
3618 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3619 }
3620
3621 /* Hook that is called by cgraph.c when a node is removed. */
3622
3623 static void
3624 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3625 {
3626 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3627 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3628 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3629 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3630 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3631 }
3632
3633 /* Hook that is called by cgraph.c when an edge is duplicated. */
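/* For IPA_JF_CONST jump functions, a usable reference description (rdesc)
   is duplicated in one of three ways below: if the new edge has the same
   caller, the underlying ipa_ref is cloned and a fresh rdesc allocated for
   it; if the source edge is the one the rdesc describes, the copy is
   chained into its list of duplicates; otherwise the duplicate belonging
   to our tree of inline clones is looked up in that list.  */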
3634
3635 static void
3636 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3637 __attribute__((unused)) void *data)
3638 {
3639 struct ipa_edge_args *old_args, *new_args;
3640 unsigned int i;
3641
3642 ipa_check_create_edge_args ();
3643
3644 old_args = IPA_EDGE_REF (src);
3645 new_args = IPA_EDGE_REF (dst);
3646
3647 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3648 if (old_args->polymorphic_call_contexts)
3649 new_args->polymorphic_call_contexts
3650 = vec_safe_copy (old_args->polymorphic_call_contexts);
3651
3652 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3653 {
3654 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3655 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3656
3657 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3658
3659 if (src_jf->type == IPA_JF_CONST)
3660 {
3661 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3662
3663 if (!src_rdesc)
3664 dst_jf->value.constant.rdesc = NULL;
3665 else if (src->caller == dst->caller)
3666 {
3667 struct ipa_ref *ref;
3668 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3669 gcc_checking_assert (n);
3670 ref = src->caller->find_reference (n, src->call_stmt,
3671 src->lto_stmt_uid);
3672 gcc_checking_assert (ref);
3673 dst->caller->clone_reference (ref, ref->stmt);
3674
3675 gcc_checking_assert (ipa_refdesc_pool);
3676 struct ipa_cst_ref_desc *dst_rdesc
3677 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3678 dst_rdesc->cs = dst;
3679 dst_rdesc->refcount = src_rdesc->refcount;
3680 dst_rdesc->next_duplicate = NULL;
3681 dst_jf->value.constant.rdesc = dst_rdesc;
3682 }
3683 else if (src_rdesc->cs == src)
3684 {
3685 struct ipa_cst_ref_desc *dst_rdesc;
3686 gcc_checking_assert (ipa_refdesc_pool);
3687 dst_rdesc
3688 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3689 dst_rdesc->cs = dst;
3690 dst_rdesc->refcount = src_rdesc->refcount;
3691 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3692 src_rdesc->next_duplicate = dst_rdesc;
3693 dst_jf->value.constant.rdesc = dst_rdesc;
3694 }
3695 else
3696 {
3697 struct ipa_cst_ref_desc *dst_rdesc;
3698 /* This can happen during inlining, when a JFUNC can refer to a
3699 reference taken in a function up in the tree of inline clones.
3700 We need to find the duplicate that refers to our tree of
3701 inline clones. */
3702
3703 gcc_assert (dst->caller->global.inlined_to);
3704 for (dst_rdesc = src_rdesc->next_duplicate;
3705 dst_rdesc;
3706 dst_rdesc = dst_rdesc->next_duplicate)
3707 {
3708 struct cgraph_node *top;
3709 top = dst_rdesc->cs->caller->global.inlined_to
3710 ? dst_rdesc->cs->caller->global.inlined_to
3711 : dst_rdesc->cs->caller;
3712 if (dst->caller->global.inlined_to == top)
3713 break;
3714 }
3715 gcc_assert (dst_rdesc);
3716 dst_jf->value.constant.rdesc = dst_rdesc;
3717 }
3718 }
3719 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3720 && src->caller == dst->caller)
3721 {
3722 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3723 ? dst->caller->global.inlined_to : dst->caller;
3724 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3725 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3726
3727 int c = ipa_get_controlled_uses (root_info, idx);
3728 if (c != IPA_UNDESCRIBED_USE)
3729 {
3730 c++;
3731 ipa_set_controlled_uses (root_info, idx, c);
3732 }
3733 }
3734 }
3735 }
3736
3737 /* Hook that is called by cgraph.c when a node is duplicated. */
3738
3739 static void
3740 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3741 ATTRIBUTE_UNUSED void *data)
3742 {
3743 struct ipa_node_params *old_info, *new_info;
3744 struct ipa_agg_replacement_value *old_av, *new_av;
3745
3746 ipa_check_create_node_params ();
3747 old_info = IPA_NODE_REF (src);
3748 new_info = IPA_NODE_REF (dst);
3749
3750 new_info->descriptors = old_info->descriptors.copy ();
3751 new_info->lattices = NULL;
3752 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3753
3754 new_info->analysis_done = old_info->analysis_done;
3755 new_info->node_enqueued = old_info->node_enqueued;
3756
3757 old_av = ipa_get_agg_replacements_for_node (src);
3758 if (!old_av)
3759 return;
3760
3761 new_av = NULL;
3762 while (old_av)
3763 {
3764 struct ipa_agg_replacement_value *v;
3765
3766 v = ggc_alloc<ipa_agg_replacement_value> ();
3767 memcpy (v, old_av, sizeof (*v));
3768 v->next = new_av;
3769 new_av = v;
3770 old_av = old_av->next;
3771 }
3772 ipa_set_node_agg_value_chain (dst, new_av);
3773 }
3774
3775
3776 /* Analyze a function newly added to the callgraph. */
3777
3778 static void
3779 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3780 {
3781 if (node->has_gimple_body_p ())
3782 ipa_analyze_node (node);
3783 }
3784
3785 /* Register our cgraph hooks if they are not already there. */
3786
3787 void
3788 ipa_register_cgraph_hooks (void)
3789 {
3790 if (!edge_removal_hook_holder)
3791 edge_removal_hook_holder =
3792 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3793 if (!node_removal_hook_holder)
3794 node_removal_hook_holder =
3795 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3796 if (!edge_duplication_hook_holder)
3797 edge_duplication_hook_holder =
3798 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3799 if (!node_duplication_hook_holder)
3800 node_duplication_hook_holder =
3801 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3802 function_insertion_hook_holder =
3803 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3804 }
3805
3806 /* Unregister our cgraph hooks. */
3807
3808 static void
3809 ipa_unregister_cgraph_hooks (void)
3810 {
3811 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3812 edge_removal_hook_holder = NULL;
3813 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3814 node_removal_hook_holder = NULL;
3815 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3816 edge_duplication_hook_holder = NULL;
3817 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3818 node_duplication_hook_holder = NULL;
3819 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3820 function_insertion_hook_holder = NULL;
3821 }
3822
3823 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3824 longer needed after ipa-cp. */
3825
3826 void
3827 ipa_free_all_structures_after_ipa_cp (void)
3828 {
3829 if (!optimize)
3830 {
3831 ipa_free_all_edge_args ();
3832 ipa_free_all_node_params ();
3833 free_alloc_pool (ipcp_sources_pool);
3834 free_alloc_pool (ipcp_values_pool);
3835 free_alloc_pool (ipcp_agg_lattice_pool);
3836 ipa_unregister_cgraph_hooks ();
3837 if (ipa_refdesc_pool)
3838 free_alloc_pool (ipa_refdesc_pool);
3839 }
3840 }
3841
3842 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3843 longer needed after indirect inlining. */
3844
3845 void
3846 ipa_free_all_structures_after_iinln (void)
3847 {
3848 ipa_free_all_edge_args ();
3849 ipa_free_all_node_params ();
3850 ipa_unregister_cgraph_hooks ();
3851 if (ipcp_sources_pool)
3852 free_alloc_pool (ipcp_sources_pool);
3853 if (ipcp_values_pool)
3854 free_alloc_pool (ipcp_values_pool);
3855 if (ipcp_agg_lattice_pool)
3856 free_alloc_pool (ipcp_agg_lattice_pool);
3857 if (ipa_refdesc_pool)
3858 free_alloc_pool (ipa_refdesc_pool);
3859 }
3860
3861 /* Print ipa_tree_map data structures (parameter descriptors) of function
3862 NODE to F. */
3863
3864 void
3865 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3866 {
3867 int i, count;
3868 struct ipa_node_params *info;
3869
3870 if (!node->definition)
3871 return;
3872 info = IPA_NODE_REF (node);
3873 fprintf (f, " function %s/%i parameter descriptors:\n",
3874 node->name (), node->order);
3875 count = ipa_get_param_count (info);
3876 for (i = 0; i < count; i++)
3877 {
3878 int c;
3879
3880 fprintf (f, " ");
3881 ipa_dump_param (f, info, i);
3882 if (ipa_is_param_used (info, i))
3883 fprintf (f, " used");
3884 c = ipa_get_controlled_uses (info, i);
3885 if (c == IPA_UNDESCRIBED_USE)
3886 fprintf (f, " undescribed_use");
3887 else
3888 fprintf (f, " controlled_uses=%i", c);
3889 fprintf (f, "\n");
3890 }
3891 }
3892
3893 /* Print ipa_tree_map data structures of all functions in the
3894 callgraph to F. */
3895
3896 void
3897 ipa_print_all_params (FILE * f)
3898 {
3899 struct cgraph_node *node;
3900
3901 fprintf (f, "\nFunction parameters:\n");
3902 FOR_EACH_FUNCTION (node)
3903 ipa_print_node_params (f, node);
3904 }
3905
3906 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3907
3908 vec<tree>
3909 ipa_get_vector_of_formal_parms (tree fndecl)
3910 {
3911 vec<tree> args;
3912 int count;
3913 tree parm;
3914
3915 gcc_assert (!flag_wpa);
3916 count = count_formal_params (fndecl);
3917 args.create (count);
3918 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3919 args.quick_push (parm);
3920
3921 return args;
3922 }
3923
3924 /* Return a heap allocated vector containing types of formal parameters of
3925 function type FNTYPE. */
3926
3927 vec<tree>
3928 ipa_get_vector_of_formal_parm_types (tree fntype)
3929 {
3930 vec<tree> types;
3931 int count = 0;
3932 tree t;
3933
3934 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3935 count++;
3936
3937 types.create (count);
3938 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3939 types.quick_push (TREE_VALUE (t));
3940
3941 return types;
3942 }
3943
3944 /* Modify the function declaration FNDECL and its type according to the plan in
3945 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3946 to reflect the actual parameters being modified which are determined by the
3947 base_index field. */
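/* A hypothetical illustration (declarations invented for the example): for
   void f (int a, struct big *p) and an adjustment vector
   { { op = IPA_PARM_OP_COPY, base_index = 0 },
     { op = IPA_PARM_OP_NEW, type = integer_type_node } },
   the loop below keeps A as it is, synthesizes an artificial PARM_DECL for
   the new scalar (named after adj->arg_prefix, or "SYNTH" by default) and
   rebuilds TYPE_ARG_TYPES accordingly.  */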
3948
3949 void
3950 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3951 {
3952 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3953 tree orig_type = TREE_TYPE (fndecl);
3954 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3955
3956 /* The following test is an ugly hack; some functions simply don't have any
3957 arguments in their type. This is probably a bug but well... */
3958 bool care_for_types = (old_arg_types != NULL_TREE);
3959 bool last_parm_void;
3960 vec<tree> otypes;
3961 if (care_for_types)
3962 {
3963 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3964 == void_type_node);
3965 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3966 if (last_parm_void)
3967 gcc_assert (oparms.length () + 1 == otypes.length ());
3968 else
3969 gcc_assert (oparms.length () == otypes.length ());
3970 }
3971 else
3972 {
3973 last_parm_void = false;
3974 otypes.create (0);
3975 }
3976
3977 int len = adjustments.length ();
3978 tree *link = &DECL_ARGUMENTS (fndecl);
3979 tree new_arg_types = NULL;
3980 for (int i = 0; i < len; i++)
3981 {
3982 struct ipa_parm_adjustment *adj;
3983 gcc_assert (link);
3984
3985 adj = &adjustments[i];
3986 tree parm;
3987 if (adj->op == IPA_PARM_OP_NEW)
3988 parm = NULL;
3989 else
3990 parm = oparms[adj->base_index];
3991 adj->base = parm;
3992
3993 if (adj->op == IPA_PARM_OP_COPY)
3994 {
3995 if (care_for_types)
3996 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3997 new_arg_types);
3998 *link = parm;
3999 link = &DECL_CHAIN (parm);
4000 }
4001 else if (adj->op != IPA_PARM_OP_REMOVE)
4002 {
4003 tree new_parm;
4004 tree ptype;
4005
4006 if (adj->by_ref)
4007 ptype = build_pointer_type (adj->type);
4008 else
4009 {
4010 ptype = adj->type;
4011 if (is_gimple_reg_type (ptype))
4012 {
4013 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4014 if (TYPE_ALIGN (ptype) < malign)
4015 ptype = build_aligned_type (ptype, malign);
4016 }
4017 }
4018
4019 if (care_for_types)
4020 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4021
4022 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4023 ptype);
4024 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4025 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4026 DECL_ARTIFICIAL (new_parm) = 1;
4027 DECL_ARG_TYPE (new_parm) = ptype;
4028 DECL_CONTEXT (new_parm) = fndecl;
4029 TREE_USED (new_parm) = 1;
4030 DECL_IGNORED_P (new_parm) = 1;
4031 layout_decl (new_parm, 0);
4032
4033 if (adj->op == IPA_PARM_OP_NEW)
4034 adj->base = NULL;
4035 else
4036 adj->base = parm;
4037 adj->new_decl = new_parm;
4038
4039 *link = new_parm;
4040 link = &DECL_CHAIN (new_parm);
4041 }
4042 }
4043
4044 *link = NULL_TREE;
4045
4046 tree new_reversed = NULL;
4047 if (care_for_types)
4048 {
4049 new_reversed = nreverse (new_arg_types);
4050 if (last_parm_void)
4051 {
4052 if (new_reversed)
4053 TREE_CHAIN (new_arg_types) = void_list_node;
4054 else
4055 new_reversed = void_list_node;
4056 }
4057 }
4058
4059 /* Use copy_node to preserve as much as possible from the original type
4060 (debug info, attribute lists etc.).
4061 The exception is that METHOD_TYPEs must have a THIS argument; when we
4062 are asked to remove it, we need to build a new FUNCTION_TYPE
4063 instead. */
4064 tree new_type = NULL;
4065 if (TREE_CODE (orig_type) != METHOD_TYPE
4066 || (adjustments[0].op == IPA_PARM_OP_COPY
4067 && adjustments[0].base_index == 0))
4068 {
4069 new_type = build_distinct_type_copy (orig_type);
4070 TYPE_ARG_TYPES (new_type) = new_reversed;
4071 }
4072 else
4073 {
4074 new_type
4075 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4076 new_reversed));
4077 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4078 DECL_VINDEX (fndecl) = NULL_TREE;
4079 }
4080
4081 /* When the signature changes, we need to clear builtin info. */
4082 if (DECL_BUILT_IN (fndecl))
4083 {
4084 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4085 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4086 }
4087
4088 TREE_TYPE (fndecl) = new_type;
4089 DECL_VIRTUAL_P (fndecl) = 0;
4090 DECL_LANG_SPECIFIC (fndecl) = NULL;
4091 otypes.release ();
4092 oparms.release ();
4093 }
4094
4095 /* Modify actual arguments of the call statement STMT as indicated in
4096 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
4097 Otherwise it must contain the corresponding call graph edge. */
4098
4099 void
4100 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4101 ipa_parm_adjustment_vec adjustments)
4102 {
4103 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4104 vec<tree> vargs;
4105 vec<tree, va_gc> **debug_args = NULL;
4106 gimple new_stmt;
4107 gimple_stmt_iterator gsi, prev_gsi;
4108 tree callee_decl;
4109 int i, len;
4110
4111 len = adjustments.length ();
4112 vargs.create (len);
4113 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4114 current_node->remove_stmt_references (stmt);
4115
4116 gsi = gsi_for_stmt (stmt);
4117 prev_gsi = gsi;
4118 gsi_prev (&prev_gsi);
4119 for (i = 0; i < len; i++)
4120 {
4121 struct ipa_parm_adjustment *adj;
4122
4123 adj = &adjustments[i];
4124
4125 if (adj->op == IPA_PARM_OP_COPY)
4126 {
4127 tree arg = gimple_call_arg (stmt, adj->base_index);
4128
4129 vargs.quick_push (arg);
4130 }
4131 else if (adj->op != IPA_PARM_OP_REMOVE)
4132 {
4133 tree expr, base, off;
4134 location_t loc;
4135 unsigned int deref_align = 0;
4136 bool deref_base = false;
4137
4138 /* When we create a new parameter out of the value of the old one, we
4139 can do the following kinds of transformations:
4140
4141 - A scalar passed by reference is converted to a scalar passed by
4142 value. (adj->by_ref is false and the type of the original
4143 actual argument is a pointer to a scalar).
4144
4145 - A part of an aggregate is passed instead of the whole aggregate.
4146 The part can be passed either by value or by reference, this is
4147 determined by value of adj->by_ref. Moreover, the code below
4148 handles both situations when the original aggregate is passed by
4149 value (its type is not a pointer) and when it is passed by
4150 reference (it is a pointer to an aggregate).
4151
4152 When the new argument is passed by reference (adj->by_ref is true)
4153 it must be a part of an aggregate and therefore we form it by
4154 simply taking the address of a reference inside the original
4155 aggregate. */
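/* A hypothetical illustration of the by-value case: if the callee only
   reads a scalar field at byte offset 8 of an aggregate that the caller
   passes by reference, adj->offset is 8 * BITS_PER_UNIT and the code below
   turns the original actual argument into a MEM_REF with a byte offset of
   8, loaded into a temporary because the result is of register type.  */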
4156
4157 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4158 base = gimple_call_arg (stmt, adj->base_index);
4159 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4160 : EXPR_LOCATION (base);
4161
4162 if (TREE_CODE (base) != ADDR_EXPR
4163 && POINTER_TYPE_P (TREE_TYPE (base)))
4164 off = build_int_cst (adj->alias_ptr_type,
4165 adj->offset / BITS_PER_UNIT);
4166 else
4167 {
4168 HOST_WIDE_INT base_offset;
4169 tree prev_base;
4170 bool addrof;
4171
4172 if (TREE_CODE (base) == ADDR_EXPR)
4173 {
4174 base = TREE_OPERAND (base, 0);
4175 addrof = true;
4176 }
4177 else
4178 addrof = false;
4179 prev_base = base;
4180 base = get_addr_base_and_unit_offset (base, &base_offset);
4181 /* Aggregate arguments can have non-invariant addresses. */
4182 if (!base)
4183 {
4184 base = build_fold_addr_expr (prev_base);
4185 off = build_int_cst (adj->alias_ptr_type,
4186 adj->offset / BITS_PER_UNIT);
4187 }
4188 else if (TREE_CODE (base) == MEM_REF)
4189 {
4190 if (!addrof)
4191 {
4192 deref_base = true;
4193 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4194 }
4195 off = build_int_cst (adj->alias_ptr_type,
4196 base_offset
4197 + adj->offset / BITS_PER_UNIT);
4198 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4199 off);
4200 base = TREE_OPERAND (base, 0);
4201 }
4202 else
4203 {
4204 off = build_int_cst (adj->alias_ptr_type,
4205 base_offset
4206 + adj->offset / BITS_PER_UNIT);
4207 base = build_fold_addr_expr (base);
4208 }
4209 }
4210
4211 if (!adj->by_ref)
4212 {
4213 tree type = adj->type;
4214 unsigned int align;
4215 unsigned HOST_WIDE_INT misalign;
4216
4217 if (deref_base)
4218 {
4219 align = deref_align;
4220 misalign = 0;
4221 }
4222 else
4223 {
4224 get_pointer_alignment_1 (base, &align, &misalign);
4225 if (TYPE_ALIGN (type) > align)
4226 align = TYPE_ALIGN (type);
4227 }
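/* Fold the constant offset into the misalignment; if the result is
   nonzero, the only alignment still guaranteed is its least significant
   set bit (misalign & -misalign).  */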
4228 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4229 * BITS_PER_UNIT);
4230 misalign = misalign & (align - 1);
4231 if (misalign != 0)
4232 align = (misalign & -misalign);
4233 if (align < TYPE_ALIGN (type))
4234 type = build_aligned_type (type, align);
4235 base = force_gimple_operand_gsi (&gsi, base,
4236 true, NULL, true, GSI_SAME_STMT);
4237 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4238 /* If expr is not a valid gimple call argument, emit
4239 a load into a temporary. */
4240 if (is_gimple_reg_type (TREE_TYPE (expr)))
4241 {
4242 gimple tem = gimple_build_assign (NULL_TREE, expr);
4243 if (gimple_in_ssa_p (cfun))
4244 {
4245 gimple_set_vuse (tem, gimple_vuse (stmt));
4246 expr = make_ssa_name (TREE_TYPE (expr), tem);
4247 }
4248 else
4249 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4250 gimple_assign_set_lhs (tem, expr);
4251 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4252 }
4253 }
4254 else
4255 {
4256 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4257 expr = build_fold_addr_expr (expr);
4258 expr = force_gimple_operand_gsi (&gsi, expr,
4259 true, NULL, true, GSI_SAME_STMT);
4260 }
4261 vargs.quick_push (expr);
4262 }
4263 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4264 {
4265 unsigned int ix;
4266 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4267 gimple def_temp;
4268
4269 arg = gimple_call_arg (stmt, adj->base_index);
4270 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4271 {
4272 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4273 continue;
4274 arg = fold_convert_loc (gimple_location (stmt),
4275 TREE_TYPE (origin), arg);
4276 }
4277 if (debug_args == NULL)
4278 debug_args = decl_debug_args_insert (callee_decl);
4279 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4280 if (ddecl == origin)
4281 {
4282 ddecl = (**debug_args)[ix + 1];
4283 break;
4284 }
4285 if (ddecl == NULL)
4286 {
4287 ddecl = make_node (DEBUG_EXPR_DECL);
4288 DECL_ARTIFICIAL (ddecl) = 1;
4289 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4290 DECL_MODE (ddecl) = DECL_MODE (origin);
4291
4292 vec_safe_push (*debug_args, origin);
4293 vec_safe_push (*debug_args, ddecl);
4294 }
4295 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4296 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4297 }
4298 }
4299
4300 if (dump_file && (dump_flags & TDF_DETAILS))
4301 {
4302 fprintf (dump_file, "replacing stmt:");
4303 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4304 }
4305
4306 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4307 vargs.release ();
4308 if (gimple_call_lhs (stmt))
4309 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4310
4311 gimple_set_block (new_stmt, gimple_block (stmt));
4312 if (gimple_has_location (stmt))
4313 gimple_set_location (new_stmt, gimple_location (stmt));
4314 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4315 gimple_call_copy_flags (new_stmt, stmt);
4316 if (gimple_in_ssa_p (cfun))
4317 {
4318 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4319 if (gimple_vdef (stmt))
4320 {
4321 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4322 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4323 }
4324 }
4325
4326 if (dump_file && (dump_flags & TDF_DETAILS))
4327 {
4328 fprintf (dump_file, "with stmt:");
4329 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4330 fprintf (dump_file, "\n");
4331 }
4332 gsi_replace (&gsi, new_stmt, true);
4333 if (cs)
4334 cs->set_call_stmt (new_stmt);
4335 do
4336 {
4337 current_node->record_stmt_references (gsi_stmt (gsi));
4338 gsi_prev (&gsi);
4339 }
4340 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4341 }
4342
4343 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4344 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4345 specifies whether the function should care about type incompatibility between
4346 the current and new expressions. If it is false, the function will leave
4347 incompatibility issues to the caller. Return true iff the expression
4348 was modified. */
4349
4350 bool
4351 ipa_modify_expr (tree *expr, bool convert,
4352 ipa_parm_adjustment_vec adjustments)
4353 {
4354 struct ipa_parm_adjustment *cand
4355 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4356 if (!cand)
4357 return false;
4358
4359 tree src;
4360 if (cand->by_ref)
4361 src = build_simple_mem_ref (cand->new_decl);
4362 else
4363 src = cand->new_decl;
4364
4365 if (dump_file && (dump_flags & TDF_DETAILS))
4366 {
4367 fprintf (dump_file, "About to replace expr ");
4368 print_generic_expr (dump_file, *expr, 0);
4369 fprintf (dump_file, " with ");
4370 print_generic_expr (dump_file, src, 0);
4371 fprintf (dump_file, "\n");
4372 }
4373
4374 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4375 {
4376 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4377 *expr = vce;
4378 }
4379 else
4380 *expr = src;
4381 return true;
4382 }
4383
4384 /* If T is an SSA_NAME, return NULL if it is not a default def or
4385 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4386 the base variable is always returned, regardless if it is a default
4387 def. Return T if it is not an SSA_NAME. */
4388
4389 static tree
4390 get_ssa_base_param (tree t, bool ignore_default_def)
4391 {
4392 if (TREE_CODE (t) == SSA_NAME)
4393 {
4394 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4395 return SSA_NAME_VAR (t);
4396 else
4397 return NULL_TREE;
4398 }
4399 return t;
4400 }
4401
4402 /* Given an expression, return an adjustment entry specifying the
4403 transformation to be done on EXPR. If no suitable adjustment entry
4404 was found, returns NULL.
4405
4406 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4407 default def, otherwise bail on them.
4408
4409 If CONVERT is non-NULL, this function will set *CONVERT if the
4410 expression provided is a component reference. ADJUSTMENTS is the
4411 adjustments vector. */
4412
4413 ipa_parm_adjustment *
4414 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4415 ipa_parm_adjustment_vec adjustments,
4416 bool ignore_default_def)
4417 {
4418 if (TREE_CODE (**expr) == BIT_FIELD_REF
4419 || TREE_CODE (**expr) == IMAGPART_EXPR
4420 || TREE_CODE (**expr) == REALPART_EXPR)
4421 {
4422 *expr = &TREE_OPERAND (**expr, 0);
4423 if (convert)
4424 *convert = true;
4425 }
4426
4427 HOST_WIDE_INT offset, size, max_size;
4428 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4429 if (!base || size == -1 || max_size == -1)
4430 return NULL;
4431
4432 if (TREE_CODE (base) == MEM_REF)
4433 {
4434 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4435 base = TREE_OPERAND (base, 0);
4436 }
4437
4438 base = get_ssa_base_param (base, ignore_default_def);
4439 if (!base || TREE_CODE (base) != PARM_DECL)
4440 return NULL;
4441
4442 struct ipa_parm_adjustment *cand = NULL;
4443 unsigned int len = adjustments.length ();
4444 for (unsigned i = 0; i < len; i++)
4445 {
4446 struct ipa_parm_adjustment *adj = &adjustments[i];
4447
4448 if (adj->base == base
4449 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4450 {
4451 cand = adj;
4452 break;
4453 }
4454 }
4455
4456 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4457 return NULL;
4458 return cand;
4459 }
4460
4461 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4462
4463 static bool
4464 index_in_adjustments_multiple_times_p (int base_index,
4465 ipa_parm_adjustment_vec adjustments)
4466 {
4467 int i, len = adjustments.length ();
4468 bool one = false;
4469
4470 for (i = 0; i < len; i++)
4471 {
4472 struct ipa_parm_adjustment *adj;
4473 adj = &adjustments[i];
4474
4475 if (adj->base_index == base_index)
4476 {
4477 if (one)
4478 return true;
4479 else
4480 one = true;
4481 }
4482 }
4483 return false;
4484 }
4485
4486
4487 /* Return adjustments that should have the same effect on function parameters
4488 and call arguments as if they were first changed according to adjustments in
4489 INNER and then by adjustments in OUTER. */
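/* A hypothetical example of the composition: if INNER turns (a, b, c) into
   (a, c) by removing b, and OUTER then removes the second of those two
   parameters, the vector built below contains a copy of a plus removals
   covering both b and c, which has the same effect as adjusting (a, b, c)
   directly to (a).  */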
4490
4491 ipa_parm_adjustment_vec
4492 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4493 ipa_parm_adjustment_vec outer)
4494 {
4495 int i, outlen = outer.length ();
4496 int inlen = inner.length ();
4497 int removals = 0;
4498 ipa_parm_adjustment_vec adjustments, tmp;
4499
4500 tmp.create (inlen);
4501 for (i = 0; i < inlen; i++)
4502 {
4503 struct ipa_parm_adjustment *n;
4504 n = &inner[i];
4505
4506 if (n->op == IPA_PARM_OP_REMOVE)
4507 removals++;
4508 else
4509 {
4510 /* FIXME: Handling of new arguments is not implemented yet. */
4511 gcc_assert (n->op != IPA_PARM_OP_NEW);
4512 tmp.quick_push (*n);
4513 }
4514 }
4515
4516 adjustments.create (outlen + removals);
4517 for (i = 0; i < outlen; i++)
4518 {
4519 struct ipa_parm_adjustment r;
4520 struct ipa_parm_adjustment *out = &outer[i];
4521 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4522
4523 memset (&r, 0, sizeof (r));
4524 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4525 if (out->op == IPA_PARM_OP_REMOVE)
4526 {
4527 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4528 {
4529 r.op = IPA_PARM_OP_REMOVE;
4530 adjustments.quick_push (r);
4531 }
4532 continue;
4533 }
4534 else
4535 {
4536 /* FIXME: Handling of new arguments is not implemented yet. */
4537 gcc_assert (out->op != IPA_PARM_OP_NEW);
4538 }
4539
4540 r.base_index = in->base_index;
4541 r.type = out->type;
4542
4543 /* FIXME: Create nonlocal value too. */
4544
4545 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4546 r.op = IPA_PARM_OP_COPY;
4547 else if (in->op == IPA_PARM_OP_COPY)
4548 r.offset = out->offset;
4549 else if (out->op == IPA_PARM_OP_COPY)
4550 r.offset = in->offset;
4551 else
4552 r.offset = in->offset + out->offset;
4553 adjustments.quick_push (r);
4554 }
4555
4556 for (i = 0; i < inlen; i++)
4557 {
4558 struct ipa_parm_adjustment *n = &inner[i];
4559
4560 if (n->op == IPA_PARM_OP_REMOVE)
4561 adjustments.quick_push (*n);
4562 }
4563
4564 tmp.release ();
4565 return adjustments;
4566 }
4567
4568 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4569 friendly way, assuming they are meant to be applied to FNDECL. */
4570
4571 void
4572 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4573 tree fndecl)
4574 {
4575 int i, len = adjustments.length ();
4576 bool first = true;
4577 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4578
4579 fprintf (file, "IPA param adjustments: ");
4580 for (i = 0; i < len; i++)
4581 {
4582 struct ipa_parm_adjustment *adj;
4583 adj = &adjustments[i];
4584
4585 if (!first)
4586 fprintf (file, " ");
4587 else
4588 first = false;
4589
4590 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4591 print_generic_expr (file, parms[adj->base_index], 0);
4592 if (adj->base)
4593 {
4594 fprintf (file, ", base: ");
4595 print_generic_expr (file, adj->base, 0);
4596 }
4597 if (adj->new_decl)
4598 {
4599 fprintf (file, ", new_decl: ");
4600 print_generic_expr (file, adj->new_decl, 0);
4601 }
4602 if (adj->new_ssa_base)
4603 {
4604 fprintf (file, ", new_ssa_base: ");
4605 print_generic_expr (file, adj->new_ssa_base, 0);
4606 }
4607
4608 if (adj->op == IPA_PARM_OP_COPY)
4609 fprintf (file, ", copy_param");
4610 else if (adj->op == IPA_PARM_OP_REMOVE)
4611 fprintf (file, ", remove_param");
4612 else
4613 fprintf (file, ", offset %li", (long) adj->offset);
4614 if (adj->by_ref)
4615 fprintf (file, ", by_ref");
4616 print_node_brief (file, ", type: ", adj->type, 0);
4617 fprintf (file, "\n");
4618 }
4619 parms.release ();
4620 }
4621
4622 /* Dump the AV linked list. */
4623
4624 void
4625 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4626 {
4627 bool comma = false;
4628 fprintf (f, " Aggregate replacements:");
4629 for (; av; av = av->next)
4630 {
4631 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4632 av->index, av->offset);
4633 print_generic_expr (f, av->value, 0);
4634 comma = true;
4635 }
4636 fprintf (f, "\n");
4637 }
4638
4639 /* Stream out jump function JUMP_FUNC to OB. */
4640
4641 static void
4642 ipa_write_jump_function (struct output_block *ob,
4643 struct ipa_jump_func *jump_func)
4644 {
4645 struct ipa_agg_jf_item *item;
4646 struct bitpack_d bp;
4647 int i, count;
4648
4649 streamer_write_uhwi (ob, jump_func->type);
4650 switch (jump_func->type)
4651 {
4652 case IPA_JF_UNKNOWN:
4653 break;
4654 case IPA_JF_KNOWN_TYPE:
4655 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4656 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4657 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4658 break;
4659 case IPA_JF_CONST:
4660 gcc_assert (
4661 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4662 stream_write_tree (ob, jump_func->value.constant.value, true);
4663 break;
4664 case IPA_JF_PASS_THROUGH:
4665 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4666 if (jump_func->value.pass_through.operation == NOP_EXPR)
4667 {
4668 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4669 bp = bitpack_create (ob->main_stream);
4670 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4671 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4672 streamer_write_bitpack (&bp);
4673 }
4674 else
4675 {
4676 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4677 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4678 }
4679 break;
4680 case IPA_JF_ANCESTOR:
4681 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4682 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4683 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4684 bp = bitpack_create (ob->main_stream);
4685 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4686 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4687 streamer_write_bitpack (&bp);
4688 break;
4689 }
4690
4691 count = vec_safe_length (jump_func->agg.items);
4692 streamer_write_uhwi (ob, count);
4693 if (count)
4694 {
4695 bp = bitpack_create (ob->main_stream);
4696 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4697 streamer_write_bitpack (&bp);
4698 }
4699
4700 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4701 {
4702 streamer_write_uhwi (ob, item->offset);
4703 stream_write_tree (ob, item->value, true);
4704 }
4705 }
4706
4707 /* Read in jump function JUMP_FUNC from IB. */
4708
4709 static void
4710 ipa_read_jump_function (struct lto_input_block *ib,
4711 struct ipa_jump_func *jump_func,
4712 struct cgraph_edge *cs,
4713 struct data_in *data_in)
4714 {
4715 enum jump_func_type jftype;
4716 enum tree_code operation;
4717 int i, count;
4718
4719 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4720 switch (jftype)
4721 {
4722 case IPA_JF_UNKNOWN:
4723 jump_func->type = IPA_JF_UNKNOWN;
4724 break;
4725 case IPA_JF_KNOWN_TYPE:
4726 {
4727 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4728 tree base_type = stream_read_tree (ib, data_in);
4729 tree component_type = stream_read_tree (ib, data_in);
4730
4731 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4732 break;
4733 }
4734 case IPA_JF_CONST:
4735 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4736 break;
4737 case IPA_JF_PASS_THROUGH:
4738 operation = (enum tree_code) streamer_read_uhwi (ib);
4739 if (operation == NOP_EXPR)
4740 {
4741 int formal_id = streamer_read_uhwi (ib);
4742 struct bitpack_d bp = streamer_read_bitpack (ib);
4743 bool agg_preserved = bp_unpack_value (&bp, 1);
4744 bool type_preserved = bp_unpack_value (&bp, 1);
4745 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4746 type_preserved);
4747 }
4748 else
4749 {
4750 tree operand = stream_read_tree (ib, data_in);
4751 int formal_id = streamer_read_uhwi (ib);
4752 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4753 operation);
4754 }
4755 break;
4756 case IPA_JF_ANCESTOR:
4757 {
4758 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4759 tree type = stream_read_tree (ib, data_in);
4760 int formal_id = streamer_read_uhwi (ib);
4761 struct bitpack_d bp = streamer_read_bitpack (ib);
4762 bool agg_preserved = bp_unpack_value (&bp, 1);
4763 bool type_preserved = bp_unpack_value (&bp, 1);
4764
4765 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4766 type_preserved);
4767 break;
4768 }
4769 }
4770
4771 count = streamer_read_uhwi (ib);
4772 vec_alloc (jump_func->agg.items, count);
4773 if (count)
4774 {
4775 struct bitpack_d bp = streamer_read_bitpack (ib);
4776 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4777 }
4778 for (i = 0; i < count; i++)
4779 {
4780 struct ipa_agg_jf_item item;
4781 item.offset = streamer_read_uhwi (ib);
4782 item.value = stream_read_tree (ib, data_in);
4783 jump_func->agg.items->quick_push (item);
4784 }
4785 }
4786
4787 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4788 relevant to indirect inlining to OB. */
4789
4790 static void
4791 ipa_write_indirect_edge_info (struct output_block *ob,
4792 struct cgraph_edge *cs)
4793 {
4794 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4795 struct bitpack_d bp;
4796
4797 streamer_write_hwi (ob, ii->param_index);
4798 bp = bitpack_create (ob->main_stream);
4799 bp_pack_value (&bp, ii->polymorphic, 1);
4800 bp_pack_value (&bp, ii->agg_contents, 1);
4801 bp_pack_value (&bp, ii->member_ptr, 1);
4802 bp_pack_value (&bp, ii->by_ref, 1);
4803 streamer_write_bitpack (&bp);
4804 if (ii->agg_contents || ii->polymorphic)
4805 streamer_write_hwi (ob, ii->offset);
4806 else
4807 gcc_assert (ii->offset == 0);
4808
4809 if (ii->polymorphic)
4810 {
4811 streamer_write_hwi (ob, ii->otr_token);
4812 stream_write_tree (ob, ii->otr_type, true);
4813 ii->context.stream_out (ob);
4814 }
4815 }
4816
4817 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4818 relevant to indirect inlining from IB. */
4819
4820 static void
4821 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4822 struct data_in *data_in,
4823 struct cgraph_edge *cs)
4824 {
4825 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4826 struct bitpack_d bp;
4827
4828 ii->param_index = (int) streamer_read_hwi (ib);
4829 bp = streamer_read_bitpack (ib);
4830 ii->polymorphic = bp_unpack_value (&bp, 1);
4831 ii->agg_contents = bp_unpack_value (&bp, 1);
4832 ii->member_ptr = bp_unpack_value (&bp, 1);
4833 ii->by_ref = bp_unpack_value (&bp, 1);
4834 if (ii->agg_contents || ii->polymorphic)
4835 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4836 else
4837 ii->offset = 0;
4838 if (ii->polymorphic)
4839 {
4840 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4841 ii->otr_type = stream_read_tree (ib, data_in);
4842 ii->context.stream_in (ib, data_in);
4843 }
4844 }
4845
4846 /* Stream out NODE info to OB. */
4847
4848 static void
4849 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4850 {
4851 int node_ref;
4852 lto_symtab_encoder_t encoder;
4853 struct ipa_node_params *info = IPA_NODE_REF (node);
4854 int j;
4855 struct cgraph_edge *e;
4856 struct bitpack_d bp;
4857
4858 encoder = ob->decl_state->symtab_node_encoder;
4859 node_ref = lto_symtab_encoder_encode (encoder, node);
4860 streamer_write_uhwi (ob, node_ref);
4861
4862 streamer_write_uhwi (ob, ipa_get_param_count (info));
4863 for (j = 0; j < ipa_get_param_count (info); j++)
4864 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4865 bp = bitpack_create (ob->main_stream);
4866 gcc_assert (info->analysis_done
4867 || ipa_get_param_count (info) == 0);
4868 gcc_assert (!info->node_enqueued);
4869 gcc_assert (!info->ipcp_orig_node);
4870 for (j = 0; j < ipa_get_param_count (info); j++)
4871 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4872 streamer_write_bitpack (&bp);
4873 for (j = 0; j < ipa_get_param_count (info); j++)
4874 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4875 for (e = node->callees; e; e = e->next_callee)
4876 {
4877 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4878
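/* The single number streamed below packs two things together: the low bit
   records whether polymorphic call contexts follow each jump function, the
   remaining bits hold the argument count.  E.g. three arguments with
   contexts are streamed as 3 * 2 + 1 == 7, which ipa_read_node_info
   decodes back as count = 7 / 2 and contexts_computed = 7 & 1.  */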
4879 streamer_write_uhwi (ob,
4880 ipa_get_cs_argument_count (args) * 2
4881 + (args->polymorphic_call_contexts != NULL));
4882 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4883 {
4884 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4885 if (args->polymorphic_call_contexts != NULL)
4886 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4887 }
4888 }
4889 for (e = node->indirect_calls; e; e = e->next_callee)
4890 {
4891 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4892
4893 streamer_write_uhwi (ob,
4894 ipa_get_cs_argument_count (args) * 2
4895 + (args->polymorphic_call_contexts != NULL));
4896 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4897 {
4898 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4899 if (args->polymorphic_call_contexts != NULL)
4900 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4901 }
4902 ipa_write_indirect_edge_info (ob, e);
4903 }
4904 }
4905
4906 /* Stream in NODE info from IB. */
4907
4908 static void
4909 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4910 struct data_in *data_in)
4911 {
4912 struct ipa_node_params *info = IPA_NODE_REF (node);
4913 int k;
4914 struct cgraph_edge *e;
4915 struct bitpack_d bp;
4916
4917 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4918
4919 for (k = 0; k < ipa_get_param_count (info); k++)
4920 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4921
4922 bp = streamer_read_bitpack (ib);
4923 if (ipa_get_param_count (info) != 0)
4924 info->analysis_done = true;
4925 info->node_enqueued = false;
4926 for (k = 0; k < ipa_get_param_count (info); k++)
4927 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4928 for (k = 0; k < ipa_get_param_count (info); k++)
4929 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4930 for (e = node->callees; e; e = e->next_callee)
4931 {
4932 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4933 int count = streamer_read_uhwi (ib);
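/* Decode the combined value produced by ipa_write_node_info: the low bit
   flags streamed polymorphic call contexts, the remaining bits are the
   argument count.  */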
4934 bool contexts_computed = count & 1;
4935 count /= 2;
4936
4937 if (!count)
4938 continue;
4939 vec_safe_grow_cleared (args->jump_functions, count);
4940 if (contexts_computed)
4941 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4942
4943 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4944 {
4945 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4946 data_in);
4947 if (contexts_computed)
4948 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4949 }
4950 }
4951 for (e = node->indirect_calls; e; e = e->next_callee)
4952 {
4953 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4954 int count = streamer_read_uhwi (ib);
4955 bool contexts_computed = count & 1;
4956 count /= 2;
4957
4958 if (count)
4959 {
4960 vec_safe_grow_cleared (args->jump_functions, count);
4961 if (contexts_computed)
4962 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4963 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4964 {
4965 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4966 data_in);
4967 if (contexts_computed)
4968 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4969 }
4970 }
4971 ipa_read_indirect_edge_info (ib, data_in, e);
4972 }
4973 }
4974
4975 /* Write jump functions for all functions in the current LTO partition. */
4976
4977 void
4978 ipa_prop_write_jump_functions (void)
4979 {
4980 struct cgraph_node *node;
4981 struct output_block *ob;
4982 unsigned int count = 0;
4983 lto_symtab_encoder_iterator lsei;
4984 lto_symtab_encoder_t encoder;
4985
4986
4987 if (!ipa_node_params_vector.exists ())
4988 return;
4989
4990 ob = create_output_block (LTO_section_jump_functions);
4991 encoder = ob->decl_state->symtab_node_encoder;
4992 ob->symbol = NULL;
4993 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4994 lsei_next_function_in_partition (&lsei))
4995 {
4996 node = lsei_cgraph_node (lsei);
4997 if (node->has_gimple_body_p ()
4998 && IPA_NODE_REF (node) != NULL)
4999 count++;
5000 }
5001
5002 streamer_write_uhwi (ob, count);
5003
5004 /* Process all of the functions. */
5005 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5006 lsei_next_function_in_partition (&lsei))
5007 {
5008 node = lsei_cgraph_node (lsei);
5009 if (node->has_gimple_body_p ()
5010 && IPA_NODE_REF (node) != NULL)
5011 ipa_write_node_info (ob, node);
5012 }
5013 streamer_write_char_stream (ob->main_stream, 0);
5014 produce_asm (ob, NULL);
5015 destroy_output_block (ob);
5016 }
5017
5018 /* Read a jump functions section in file FILE_DATA of length LEN with data
5019 DATA. */
5019
5020 static void
5021 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5022 size_t len)
5023 {
5024 const struct lto_function_header *header =
5025 (const struct lto_function_header *) data;
5026 const int cfg_offset = sizeof (struct lto_function_header);
5027 const int main_offset = cfg_offset + header->cfg_size;
5028 const int string_offset = main_offset + header->main_size;
5029 struct data_in *data_in;
5030 unsigned int i;
5031 unsigned int count;
5032
5033 lto_input_block ib_main ((const char *) data + main_offset,
5034 header->main_size);
5035
5036 data_in =
5037 lto_data_in_create (file_data, (const char *) data + string_offset,
5038 header->string_size, vNULL);
5039 count = streamer_read_uhwi (&ib_main);
5040
5041 for (i = 0; i < count; i++)
5042 {
5043 unsigned int index;
5044 struct cgraph_node *node;
5045 lto_symtab_encoder_t encoder;
5046
5047 index = streamer_read_uhwi (&ib_main);
5048 encoder = file_data->symtab_node_encoder;
5049 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5050 index));
5051 gcc_assert (node->definition);
5052 ipa_read_node_info (&ib_main, node, data_in);
5053 }
5054 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5055 len);
5056 lto_data_in_delete (data_in);
5057 }
5058
5059 /* Read ipcp jump functions. */
5060
5061 void
5062 ipa_prop_read_jump_functions (void)
5063 {
5064 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5065 struct lto_file_decl_data *file_data;
5066 unsigned int j = 0;
5067
5068 ipa_check_create_node_params ();
5069 ipa_check_create_edge_args ();
5070 ipa_register_cgraph_hooks ();
5071
5072 while ((file_data = file_data_vec[j++]))
5073 {
5074 size_t len;
5075 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5076
5077 if (data)
5078 ipa_prop_read_section (file_data, data, len);
5079 }
5080 }
5081
5082 /* After merging units, we can get a mismatch in argument counts.
5083 Decl merging might also have rendered parameter lists obsolete.
5084 Also compute called_with_variable_arg info. */
5085
5086 void
5087 ipa_update_after_lto_read (void)
5088 {
5089 ipa_check_create_node_params ();
5090 ipa_check_create_edge_args ();
5091 }
5092
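/* Stream out the aggregate value replacement chain of NODE to OB.  Each
   record consists of the offset, the parameter index, the value tree and a
   single by_ref bit, in the order read_agg_replacement_chain expects.  */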
5093 void
5094 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
5095 {
5096 int node_ref;
5097 unsigned int count = 0;
5098 lto_symtab_encoder_t encoder;
5099 struct ipa_agg_replacement_value *aggvals, *av;
5100
5101 aggvals = ipa_get_agg_replacements_for_node (node);
5102 encoder = ob->decl_state->symtab_node_encoder;
5103 node_ref = lto_symtab_encoder_encode (encoder, node);
5104 streamer_write_uhwi (ob, node_ref);
5105
5106 for (av = aggvals; av; av = av->next)
5107 count++;
5108 streamer_write_uhwi (ob, count);
5109
5110 for (av = aggvals; av; av = av->next)
5111 {
5112 struct bitpack_d bp;
5113
5114 streamer_write_uhwi (ob, av->offset);
5115 streamer_write_uhwi (ob, av->index);
5116 stream_write_tree (ob, av->value, true);
5117
5118 bp = bitpack_create (ob->main_stream);
5119 bp_pack_value (&bp, av->by_ref, 1);
5120 streamer_write_bitpack (&bp);
5121 }
5122 }
5123
5124 /* Stream in the aggregate value replacement chain for NODE from IB. */
5125
5126 static void
5127 read_agg_replacement_chain (struct lto_input_block *ib,
5128 struct cgraph_node *node,
5129 struct data_in *data_in)
5130 {
5131 struct ipa_agg_replacement_value *aggvals = NULL;
5132 unsigned int count, i;
5133
5134 count = streamer_read_uhwi (ib);
5135 for (i = 0; i < count; i++)
5136 {
5137 struct ipa_agg_replacement_value *av;
5138 struct bitpack_d bp;
5139
5140 av = ggc_alloc<ipa_agg_replacement_value> ();
5141 av->offset = streamer_read_uhwi (ib);
5142 av->index = streamer_read_uhwi (ib);
5143 av->value = stream_read_tree (ib, data_in);
5144 bp = streamer_read_bitpack (ib);
5145 av->by_ref = bp_unpack_value (&bp, 1);
5146 av->next = aggvals;
5147 aggvals = av;
5148 }
5149 ipa_set_node_agg_value_chain (node, aggvals);
5150 }
5151
5152 /* Write all aggregate replacements for nodes in the current LTO partition. */
5153
5154 void
5155 ipa_prop_write_all_agg_replacement (void)
5156 {
5157 struct cgraph_node *node;
5158 struct output_block *ob;
5159 unsigned int count = 0;
5160 lto_symtab_encoder_iterator lsei;
5161 lto_symtab_encoder_t encoder;
5162
5163 if (!ipa_node_agg_replacements)
5164 return;
5165
5166 ob = create_output_block (LTO_section_ipcp_transform);
5167 encoder = ob->decl_state->symtab_node_encoder;
5168 ob->symbol = NULL;
5169 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5170 lsei_next_function_in_partition (&lsei))
5171 {
5172 node = lsei_cgraph_node (lsei);
5173 if (node->has_gimple_body_p ()
5174 && ipa_get_agg_replacements_for_node (node) != NULL)
5175 count++;
5176 }
5177
5178 streamer_write_uhwi (ob, count);
5179
5180 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5181 lsei_next_function_in_partition (&lsei))
5182 {
5183 node = lsei_cgraph_node (lsei);
5184 if (node->has_gimple_body_p ()
5185 && ipa_get_agg_replacements_for_node (node) != NULL)
5186 write_agg_replacement_chain (ob, node);
5187 }
5188 streamer_write_char_stream (ob->main_stream, 0);
5189 produce_asm (ob, NULL);
5190 destroy_output_block (ob);
5191 }
5192
5193 /* Read replacements section in file FILE_DATA of length LEN with data
5194 DATA. */
5195
5196 static void
5197 read_replacements_section (struct lto_file_decl_data *file_data,
5198 const char *data,
5199 size_t len)
5200 {
5201 const struct lto_function_header *header =
5202 (const struct lto_function_header *) data;
5203 const int cfg_offset = sizeof (struct lto_function_header);
5204 const int main_offset = cfg_offset + header->cfg_size;
5205 const int string_offset = main_offset + header->main_size;
5206 struct data_in *data_in;
5207 unsigned int i;
5208 unsigned int count;
5209
5210 lto_input_block ib_main ((const char *) data + main_offset,
5211 header->main_size);
5212
5213 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5214 header->string_size, vNULL);
5215 count = streamer_read_uhwi (&ib_main);
5216
5217 for (i = 0; i < count; i++)
5218 {
5219 unsigned int index;
5220 struct cgraph_node *node;
5221 lto_symtab_encoder_t encoder;
5222
5223 index = streamer_read_uhwi (&ib_main);
5224 encoder = file_data->symtab_node_encoder;
5225 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5226 index));
5227 gcc_assert (node->definition);
5228 read_agg_replacement_chain (&ib_main, node, data_in);
5229 }
5230 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5231 len);
5232 lto_data_in_delete (data_in);
5233 }
5234
5235 /* Read IPA-CP aggregate replacements. */
5236
5237 void
5238 ipa_prop_read_all_agg_replacement (void)
5239 {
5240 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5241 struct lto_file_decl_data *file_data;
5242 unsigned int j = 0;
5243
5244 while ((file_data = file_data_vec[j++]))
5245 {
5246 size_t len;
5247 const char *data = lto_get_section_data (file_data,
5248 LTO_section_ipcp_transform,
5249 NULL, &len);
5250 if (data)
5251 read_replacements_section (file_data, data, len);
5252 }
5253 }
5254
5255 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5256 NODE. */
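/* A hypothetical example: if NODE skips parameters 1 and 3 of an original
   five, the mapping computed below is 0->0, 1->-1, 2->1, 3->-1, 4->2, so a
   replacement recorded for original parameter 4 gets index 2 in the
   clone.  */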
5257
5258 static void
5259 adjust_agg_replacement_values (struct cgraph_node *node,
5260 struct ipa_agg_replacement_value *aggval)
5261 {
5262 struct ipa_agg_replacement_value *v;
5263 int i, c = 0, d = 0, *adj;
5264
5265 if (!node->clone.combined_args_to_skip)
5266 return;
5267
5268 for (v = aggval; v; v = v->next)
5269 {
5270 gcc_assert (v->index >= 0);
5271 if (c < v->index)
5272 c = v->index;
5273 }
5274 c++;
5275
5276 adj = XALLOCAVEC (int, c);
5277 for (i = 0; i < c; i++)
5278 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5279 {
5280 adj[i] = -1;
5281 d++;
5282 }
5283 else
5284 adj[i] = i - d;
5285
5286 for (v = aggval; v; v = v->next)
5287 v->index = adj[v->index];
5288 }
5289
5290 /* Dominator walker driving the ipcp modification phase. */
5291
5292 class ipcp_modif_dom_walker : public dom_walker
5293 {
5294 public:
5295 ipcp_modif_dom_walker (struct func_body_info *fbi,
5296 vec<ipa_param_descriptor> descs,
5297 struct ipa_agg_replacement_value *av,
5298 bool *sc, bool *cc)
5299 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5300 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5301
5302 virtual void before_dom_children (basic_block);
5303
5304 private:
5305 struct func_body_info *m_fbi;
5306 vec<ipa_param_descriptor> m_descriptors;
5307 struct ipa_agg_replacement_value *m_aggval;
5308 bool *m_something_changed, *m_cfg_changed;
5309 };
5310
5311 void
5312 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5313 {
5314 gimple_stmt_iterator gsi;
5315 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5316 {
5317 struct ipa_agg_replacement_value *v;
5318 gimple stmt = gsi_stmt (gsi);
5319 tree rhs, val, t;
5320 HOST_WIDE_INT offset, size;
5321 int index;
5322 bool by_ref, vce;
5323
5324 if (!gimple_assign_load_p (stmt))
5325 continue;
5326 rhs = gimple_assign_rhs1 (stmt);
5327 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5328 continue;
5329
5330 vce = false;
5331 t = rhs;
5332 while (handled_component_p (t))
5333 {
5334 /* V_C_E can do things like convert an array of integers to one
5335 bigger integer and similar things that we do not handle below. */
5336 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5337 {
5338 vce = true;
5339 break;
5340 }
5341 t = TREE_OPERAND (t, 0);
5342 }
5343 if (vce)
5344 continue;
5345
5346 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5347 &offset, &size, &by_ref))
5348 continue;
5349 for (v = m_aggval; v; v = v->next)
5350 if (v->index == index
5351 && v->offset == offset)
5352 break;
5353 if (!v
5354 || v->by_ref != by_ref
5355 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5356 continue;
5357
5358 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5359 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5360 {
5361 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5362 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5363 else if (TYPE_SIZE (TREE_TYPE (rhs))
5364 == TYPE_SIZE (TREE_TYPE (v->value)))
5365 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5366 else
5367 {
5368 if (dump_file)
5369 {
5370 fprintf (dump_file, " const ");
5371 print_generic_expr (dump_file, v->value, 0);
5372 fprintf (dump_file, " can't be converted to type of ");
5373 print_generic_expr (dump_file, rhs, 0);
5374 fprintf (dump_file, "\n");
5375 }
5376 continue;
5377 }
5378 }
5379 else
5380 val = v->value;
5381
5382 if (dump_file && (dump_flags & TDF_DETAILS))
5383 {
5384 fprintf (dump_file, "Modifying stmt:\n ");
5385 print_gimple_stmt (dump_file, stmt, 0, 0);
5386 }
5387 gimple_assign_set_rhs_from_tree (&gsi, val);
5388 update_stmt (stmt);
5389
5390 if (dump_file && (dump_flags & TDF_DETAILS))
5391 {
5392 fprintf (dump_file, "into:\n ");
5393 print_gimple_stmt (dump_file, stmt, 0, 0);
5394 fprintf (dump_file, "\n");
5395 }
5396
5397 *m_something_changed = true;
5398 if (maybe_clean_eh_stmt (stmt)
5399 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5400 *m_cfg_changed = true;
5401 }
5402
5403 }
5404
5405 /* IPCP transformation phase doing propagation of aggregate values. */
5406
5407 unsigned int
5408 ipcp_transform_function (struct cgraph_node *node)
5409 {
5410 vec<ipa_param_descriptor> descriptors = vNULL;
5411 struct func_body_info fbi;
5412 struct ipa_agg_replacement_value *aggval;
5413 int param_count;
5414 bool cfg_changed = false, something_changed = false;
5415
5416 gcc_checking_assert (cfun);
5417 gcc_checking_assert (current_function_decl);
5418
5419 if (dump_file)
5420 fprintf (dump_file, "Modification phase of node %s/%i\n",
5421 node->name (), node->order);
5422
5423 aggval = ipa_get_agg_replacements_for_node (node);
5424 if (!aggval)
5425 return 0;
5426 param_count = count_formal_params (node->decl);
5427 if (param_count == 0)
5428 return 0;
5429 adjust_agg_replacement_values (node, aggval);
5430 if (dump_file)
5431 ipa_dump_agg_replacement_values (dump_file, aggval);
5432
5433 fbi.node = node;
5434 fbi.info = NULL;
5435 fbi.bb_infos = vNULL;
5436 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5437 fbi.param_count = param_count;
5438 fbi.aa_walked = 0;
5439
5440 descriptors.safe_grow_cleared (param_count);
5441 ipa_populate_param_decls (node, descriptors);
5442 calculate_dominance_info (CDI_DOMINATORS);
5443 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5444 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5445
5446 int i;
5447 struct ipa_bb_info *bi;
5448 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5449 free_ipa_bb_info (bi);
5450 fbi.bb_infos.release ();
5451 free_dominance_info (CDI_DOMINATORS);
5452 (*ipa_node_agg_replacements)[node->uid] = NULL;
5453 descriptors.release ();
5454
5455 if (!something_changed)
5456 return 0;
5457 else if (cfg_changed)
5458 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5459 else
5460 return TODO_update_ssa_only_virtuals;
5461 }