/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60 #include "stringpool.h"
61 #include "tree-ssanames.h"
62 #include "dbgcnt.h"
63 #include "domwalk.h"
64 #include "builtins.h"
65 #include "calls.h"
66
67 /* Intermediate information that we get from alias analysis about a particular
68 parameter in a particular basic_block. When a parameter or the memory it
69 references is marked modified, we use that information in all dominatd
70 blocks without cosulting alias analysis oracle. */
71
72 struct param_aa_status
73 {
74 /* Set when this structure contains meaningful information. If not, the
75 structure describing a dominating BB should be used instead. */
76 bool valid;
77
78 /* Whether we have seen something which might have modified the data in
79 question. PARM is for the parameter itself, REF is for data it points to
80 but using the alias type of individual accesses and PT is the same thing
81 but for computing aggregate pass-through functions using a very inclusive
82 ao_ref. */
83 bool parm_modified, ref_modified, pt_modified;
84 };

/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked while analyzing this function.  */
  unsigned int aa_walked;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

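/* Purely illustrative sketch, not part of the original file: one way a pass
   could map every PARM_DECL of NODE back to its formal index with the lookup
   above.  Assumes NODE has already been through ipa_initialize_node_params;
   the function name is made up.  */

static void
example_dump_param_indices (FILE *f, struct cgraph_node *node)
{
  tree parm;
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (parm = DECL_ARGUMENTS (node->decl); parm; parm = DECL_CHAIN (parm))
    {
      print_generic_expr (f, parm, 0);
      fprintf (f, " -> index %i\n", ipa_get_param_decl_index (info, parm));
    }
}
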
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE, preceded by its index.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
      if (IPA_EDGE_REF (cs)->polymorphic_call_contexts)
        ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i)->dump (f);
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set JFUNC to be a copy of another jump function (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

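/* Purely illustrative, not part of the original file: for a caller shaped
   like the made-up one below, argument 0 of the first call would be
   summarized by a simple pass-through jump function (operation NOP_EXPR),
   and argument 0 of the second call by an arithmetic pass-through with
   operation PLUS_EXPR and operand 4.  */

extern void example_callee (int);

static void
example_pass_through_caller (int x)
{
  example_callee (x);       /* Simple pass-through of formal 0.  */
  example_callee (x + 4);   /* Arithmetic pass-through, PLUS_EXPR 4.  */
}
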
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC, which must be a known
   type jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
          || !TYPE_BINFO (jfunc->value.known_type.component_type)
          || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
        return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARAM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally, functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also, as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not change the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we do not have an
     easy way to do that, so we punt on all non-pure cdtors).
     We may also record the types of cdtors and match them once we
     know the type of the instance.

     Also, code unification optimizations may merge calls from
     different blocks, making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
         is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true and fill in the jump function JFUNC with
   relevant type information or set it to unknown.  ARG is the object itself
   (not a pointer to it, unless dereferenced).  BASE is the base of the memory
   access as returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked by setting
   the boolean variable pointed to by DATA to true.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (but the
   former can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, but without the func_body_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters, accessed at
   different stages of IPA optimizations.  FBI contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
                         index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
                                    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
         we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
          || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  /* The second field must have an integral type (the delta).  */
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

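/* Purely illustrative, not part of the original file: the two-field layout
   accepted by type_like_member_ptr_p corresponds to the usual representation
   of a C++ pointer to member function, roughly the made-up struct below.  */

struct example_member_ptr_layout
{
  void (*method_ptr) (void);    /* Pointer to the method.  */
  long delta;                   /* Integral this-pointer adjustment.  */
};
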
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}

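/* Purely illustrative, not part of the original file: a minimal use of the
   lookup above.  With one existing entry covering bits [0, 32), an exact
   repeat of that entry sets *ALREADY_THERE, a partially overlapping store
   yields NULL and a disjoint store returns the tail position.  */

static void
example_agg_list_lookup (void)
{
  struct ipa_known_agg_contents_list entry = { 0, 32, NULL_TREE, NULL };
  struct ipa_known_agg_contents_list *list = &entry;
  bool already_there = false;

  get_place_in_agg_contents_list (&list, 0, 32, &already_there);  /* Match.  */
  get_place_in_agg_contents_list (&list, 16, 32, &already_there); /* NULL.  */
  get_place_in_agg_contents_list (&list, 64, 32, &already_there); /* Tail.  */
}
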
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}
1675
1676 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1677 in ARG is filled in with constant values. ARG can either be an aggregate
1678 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1679 aggregate. JFUNC is the jump function into which the constants are
1680 subsequently stored. */
1681
1682 static void
1683 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1684 struct ipa_jump_func *jfunc)
1685 {
1686 struct ipa_known_agg_contents_list *list = NULL;
1687 int item_count = 0, const_count = 0;
1688 HOST_WIDE_INT arg_offset, arg_size;
1689 gimple_stmt_iterator gsi;
1690 tree arg_base;
1691 bool check_ref, by_ref;
1692 ao_ref r;
1693
1694 /* The function operates in three stages. First, we prepare check_ref, r,
1695 arg_base and arg_offset based on what is actually passed as an actual
1696 argument. */
1697
1698 if (POINTER_TYPE_P (arg_type))
1699 {
1700 by_ref = true;
1701 if (TREE_CODE (arg) == SSA_NAME)
1702 {
1703 tree type_size;
1704 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1705 return;
1706 check_ref = true;
1707 arg_base = arg;
1708 arg_offset = 0;
1709 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1710 arg_size = tree_to_uhwi (type_size);
1711 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1712 }
1713 else if (TREE_CODE (arg) == ADDR_EXPR)
1714 {
1715 HOST_WIDE_INT arg_max_size;
1716
1717 arg = TREE_OPERAND (arg, 0);
1718 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1719 &arg_max_size);
1720 if (arg_max_size == -1
1721 || arg_max_size != arg_size
1722 || arg_offset < 0)
1723 return;
1724 if (DECL_P (arg_base))
1725 {
1726 check_ref = false;
1727 ao_ref_init (&r, arg_base);
1728 }
1729 else
1730 return;
1731 }
1732 else
1733 return;
1734 }
1735 else
1736 {
1737 HOST_WIDE_INT arg_max_size;
1738
1739 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1740
1741 by_ref = false;
1742 check_ref = false;
1743 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1744 &arg_max_size);
1745 if (arg_max_size == -1
1746 || arg_max_size != arg_size
1747 || arg_offset < 0)
1748 return;
1749
1750 ao_ref_init (&r, arg);
1751 }
1752
1753   /* Second stage walks back the BB, looks at individual statements and as long
1754      as it is confident of how the statements affect contents of the aggregate,
1755      it builds a sorted linked list of ipa_known_agg_contents_list structures
1756      describing it.  */
1757 gsi = gsi_for_stmt (call);
1758 gsi_prev (&gsi);
1759 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1760 {
1761 struct ipa_known_agg_contents_list *n, **p;
1762 gimple stmt = gsi_stmt (gsi);
1763 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1764 tree lhs, rhs, lhs_base;
1765
1766 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1767 continue;
1768 if (!gimple_assign_single_p (stmt))
1769 break;
1770
1771 lhs = gimple_assign_lhs (stmt);
1772 rhs = gimple_assign_rhs1 (stmt);
1773 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1774 || TREE_CODE (lhs) == BIT_FIELD_REF
1775 || contains_bitfld_component_ref_p (lhs))
1776 break;
1777
1778 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1779 &lhs_max_size);
1780 if (lhs_max_size == -1
1781 || lhs_max_size != lhs_size)
1782 break;
1783
1784 if (check_ref)
1785 {
1786 if (TREE_CODE (lhs_base) != MEM_REF
1787 || TREE_OPERAND (lhs_base, 0) != arg_base
1788 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1789 break;
1790 }
1791 else if (lhs_base != arg_base)
1792 {
1793 if (DECL_P (lhs_base))
1794 continue;
1795 else
1796 break;
1797 }
1798
1799 bool already_there = false;
1800 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1801 &already_there);
1802 if (!p)
1803 break;
1804 if (already_there)
1805 continue;
1806
1807 rhs = get_ssa_def_if_simple_copy (rhs);
1808 n = XALLOCA (struct ipa_known_agg_contents_list);
1809 n->size = lhs_size;
1810 n->offset = lhs_offset;
1811 if (is_gimple_ip_invariant (rhs))
1812 {
1813 n->constant = rhs;
1814 const_count++;
1815 }
1816 else
1817 n->constant = NULL_TREE;
1818 n->next = *p;
1819 *p = n;
1820
1821 item_count++;
1822 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1823 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1824 break;
1825 }
1826
1827   /* Third stage just goes over the list and creates an appropriate vector of
1828      ipa_agg_jf_item structures out of it, of course only if there are
1829      any known constants to begin with.  */
1830
1831 if (const_count)
1832 {
1833 jfunc->agg.by_ref = by_ref;
1834 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1835 }
1836 }
1837
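/* Return the declared type of the I-th formal parameter of the function
   represented by call graph edge E, or NULL if it cannot be determined.  The
   type is looked up first in the TYPE_ARG_TYPES of the callee (or of the call
   statement's function type when there is no callee declaration) and, failing
   that, in the callee's DECL_ARGUMENTS.  */
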
1838 static tree
1839 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1840 {
1841 int n;
1842 tree type = (e->callee
1843 ? TREE_TYPE (e->callee->decl)
1844 : gimple_call_fntype (e->call_stmt));
1845 tree t = TYPE_ARG_TYPES (type);
1846
1847 for (n = 0; n < i; n++)
1848 {
1849 if (!t)
1850 break;
1851 t = TREE_CHAIN (t);
1852 }
1853 if (t)
1854 return TREE_VALUE (t);
1855 if (!e->callee)
1856 return NULL;
1857 t = DECL_ARGUMENTS (e->callee->decl);
1858 for (n = 0; n < i; n++)
1859 {
1860 if (!t)
1861 return NULL;
1862 t = TREE_CHAIN (t);
1863 }
1864 if (t)
1865 return TREE_TYPE (t);
1866 return NULL;
1867 }
1868
1869 /* Compute jump functions for all arguments of callsite CS and insert the
1870 information in the jump_functions array in the ipa_edge_args corresponding
1871 to this callsite. */
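
/* For example (an illustrative sketch, not from the original sources), in

     struct S { int x; };
     struct S glob;
     void callee (int i, struct S *p);
     void caller (int i)
     {
       callee (i, &glob);
     }

   the first argument would typically yield a simple pass-through jump
   function for parameter 0 and the second a constant jump function holding
   &glob.  */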
1872
1873 static void
1874 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1875 struct cgraph_edge *cs)
1876 {
1877 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1878 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1879 gimple call = cs->call_stmt;
1880 int n, arg_num = gimple_call_num_args (call);
1881 bool useful_context = false;
1882
1883 if (arg_num == 0 || args->jump_functions)
1884 return;
1885 vec_safe_grow_cleared (args->jump_functions, arg_num);
1886 if (flag_devirtualize)
1887 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1888
1889 if (gimple_call_internal_p (call))
1890 return;
1891 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1892 return;
1893
1894 for (n = 0; n < arg_num; n++)
1895 {
1896 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1897 tree arg = gimple_call_arg (call, n);
1898 tree param_type = ipa_get_callee_param_type (cs, n);
1899 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1900 {
1901 tree instance;
1902 struct ipa_polymorphic_call_context context (cs->caller->decl,
1903 arg, cs->call_stmt,
1904 &instance);
1905 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1906 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1907 if (!context.useless_p ())
1908 useful_context = true;
1909 }
1910
1911 if (is_gimple_ip_invariant (arg))
1912 ipa_set_jf_constant (jfunc, arg, cs);
1913 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1914 && TREE_CODE (arg) == PARM_DECL)
1915 {
1916 int index = ipa_get_param_decl_index (info, arg);
1917
1918 	  gcc_assert (index >= 0);
1919 	  /* Aggregate passed by value, check for pass-through, otherwise we
1920 	     will attempt to fill in aggregate contents later in this
1921 	     loop.  */
1922 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1923 {
1924 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1925 continue;
1926 }
1927 }
1928 else if (TREE_CODE (arg) == SSA_NAME)
1929 {
1930 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1931 {
1932 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1933 if (index >= 0)
1934 {
1935 bool agg_p, type_p;
1936 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1937 if (param_type && POINTER_TYPE_P (param_type))
1938 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1939 call, jfunc);
1940 else
1941 type_p = false;
1942 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1943 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1944 type_p);
1945 }
1946 }
1947 else
1948 {
1949 gimple stmt = SSA_NAME_DEF_STMT (arg);
1950 if (is_gimple_assign (stmt))
1951 compute_complex_assign_jump_func (fbi, info, jfunc,
1952 call, stmt, arg, param_type);
1953 else if (gimple_code (stmt) == GIMPLE_PHI)
1954 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1955 call, stmt, param_type);
1956 }
1957 }
1958 else
1959 compute_known_type_jump_func (arg, jfunc, call,
1960 param_type
1961 && POINTER_TYPE_P (param_type)
1962 ? TREE_TYPE (param_type)
1963 : NULL);
1964
1965       /* If ARG is a pointer, we cannot use its type to determine the type of the
1966 	 aggregate passed (because type conversions are ignored in gimple).  We can
1967 	 usually get the type safely from the function declaration, but for K&R
1968 	 prototypes or variadic functions we can try our luck with the type of the
1969 	 pointer passed.  TODO: Since we look for actual initialization of the memory
1970 	 object, we might do better to work out the type based on the stores we find.  */
1971 if (!param_type)
1972 param_type = TREE_TYPE (arg);
1973
1974 if ((jfunc->type != IPA_JF_PASS_THROUGH
1975 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1976 && (jfunc->type != IPA_JF_ANCESTOR
1977 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1978 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1979 || POINTER_TYPE_P (param_type)))
1980 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1981 }
1982 if (!useful_context)
1983 vec_free (args->polymorphic_call_contexts);
1984 }
1985
1986 /* Compute jump functions for all edges - both direct and indirect - outgoing
1987 from BB. */
1988
1989 static void
1990 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1991 {
1992 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1993 int i;
1994 struct cgraph_edge *cs;
1995
1996 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1997 {
1998 struct cgraph_node *callee = cs->callee;
1999
2000 if (callee)
2001 {
2002 callee->ultimate_alias_target ();
2003 /* We do not need to bother analyzing calls to unknown functions
2004 unless they may become known during lto/whopr. */
2005 if (!callee->definition && !flag_lto)
2006 continue;
2007 }
2008 ipa_compute_jump_functions_for_edge (fbi, cs);
2009 }
2010 }
2011
2012 /* If STMT looks like a statement loading a value from a member pointer formal
2013    parameter, return that parameter and store the offset of the field in
2014 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2015 might be clobbered). If USE_DELTA, then we look for a use of the delta
2016 field rather than the pfn. */
2017
2018 static tree
2019 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2020 HOST_WIDE_INT *offset_p)
2021 {
2022 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2023
2024 if (!gimple_assign_single_p (stmt))
2025 return NULL_TREE;
2026
2027 rhs = gimple_assign_rhs1 (stmt);
2028 if (TREE_CODE (rhs) == COMPONENT_REF)
2029 {
2030 ref_field = TREE_OPERAND (rhs, 1);
2031 rhs = TREE_OPERAND (rhs, 0);
2032 }
2033 else
2034 ref_field = NULL_TREE;
2035 if (TREE_CODE (rhs) != MEM_REF)
2036 return NULL_TREE;
2037 rec = TREE_OPERAND (rhs, 0);
2038 if (TREE_CODE (rec) != ADDR_EXPR)
2039 return NULL_TREE;
2040 rec = TREE_OPERAND (rec, 0);
2041 if (TREE_CODE (rec) != PARM_DECL
2042 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2043 return NULL_TREE;
2044 ref_offset = TREE_OPERAND (rhs, 1);
2045
2046 if (use_delta)
2047 fld = delta_field;
2048 else
2049 fld = ptr_field;
2050 if (offset_p)
2051 *offset_p = int_bit_position (fld);
2052
2053 if (ref_field)
2054 {
2055 if (integer_nonzerop (ref_offset))
2056 return NULL_TREE;
2057 return ref_field == fld ? rec : NULL_TREE;
2058 }
2059 else
2060 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2061 : NULL_TREE;
2062 }
2063
2064 /* Returns true iff T is an SSA_NAME defined by a statement. */
2065
2066 static bool
2067 ipa_is_ssa_with_stmt_def (tree t)
2068 {
2069 if (TREE_CODE (t) == SSA_NAME
2070 && !SSA_NAME_IS_DEFAULT_DEF (t))
2071 return true;
2072 else
2073 return false;
2074 }
2075
2076 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2077    call to the formal parameter with index PARAM_INDEX.  NODE is the caller.
2078    Return the indirect call graph edge.  */
2079
2080 static struct cgraph_edge *
2081 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2082 {
2083 struct cgraph_edge *cs;
2084
2085 cs = node->get_edge (stmt);
2086 cs->indirect_info->param_index = param_index;
2087 cs->indirect_info->agg_contents = 0;
2088 cs->indirect_info->member_ptr = 0;
2089 return cs;
2090 }
2091
2092 /* Analyze the CALL and examine uses of formal parameters of the caller
2093    FBI->node (described by FBI->info), using the intermediate information
2094    about each formal parameter kept in FBI.  Currently it checks
2095 whether the call calls a pointer that is a formal parameter and if so, the
2096 parameter is marked with the called flag and an indirect call graph edge
2097 describing the call is created. This is very simple for ordinary pointers
2098 represented in SSA but not-so-nice when it comes to member pointers. The
2099 ugly part of this function does nothing more than trying to match the
2100 pattern of such a call. An example of such a pattern is the gimple dump
2101 below, the call is on the last line:
2102
2103 <bb 2>:
2104 f$__delta_5 = f.__delta;
2105 f$__pfn_24 = f.__pfn;
2106
2107 or
2108 <bb 2>:
2109 f$__delta_5 = MEM[(struct *)&f];
2110 f$__pfn_24 = MEM[(struct *)&f + 4B];
2111
2112 and a few lines below:
2113
2114 <bb 5>
2115 D.2496_3 = (int) f$__pfn_24;
2116 D.2497_4 = D.2496_3 & 1;
2117 if (D.2497_4 != 0)
2118 goto <bb 3>;
2119 else
2120 goto <bb 4>;
2121
2122 <bb 6>:
2123 D.2500_7 = (unsigned int) f$__delta_5;
2124 D.2501_8 = &S + D.2500_7;
2125 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2126 D.2503_10 = *D.2502_9;
2127 D.2504_12 = f$__pfn_24 + -1;
2128 D.2505_13 = (unsigned int) D.2504_12;
2129 D.2506_14 = D.2503_10 + D.2505_13;
2130 D.2507_15 = *D.2506_14;
2131 iftmp.11_16 = (String:: *) D.2507_15;
2132
2133 <bb 7>:
2134 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2135 D.2500_19 = (unsigned int) f$__delta_5;
2136 D.2508_20 = &S + D.2500_19;
2137 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2138
2139 Such patterns are results of simple calls to a member pointer:
2140
2141 int doprinting (int (MyString::* f)(int) const)
2142 {
2143 MyString S ("somestring");
2144
2145 return (S.*f)(4);
2146 }
2147
2148 Moreover, the function also looks for called pointers loaded from aggregates
2149 passed by value or reference. */
2150
2151 static void
2152 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2153 tree target)
2154 {
2155 struct ipa_node_params *info = fbi->info;
2156 HOST_WIDE_INT offset;
2157 bool by_ref;
2158
2159 if (SSA_NAME_IS_DEFAULT_DEF (target))
2160 {
2161 tree var = SSA_NAME_VAR (target);
2162 int index = ipa_get_param_decl_index (info, var);
2163 if (index >= 0)
2164 ipa_note_param_call (fbi->node, index, call);
2165 return;
2166 }
2167
2168 int index;
2169 gimple def = SSA_NAME_DEF_STMT (target);
2170 if (gimple_assign_single_p (def)
2171 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2172 gimple_assign_rhs1 (def), &index, &offset,
2173 NULL, &by_ref))
2174 {
2175 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2176 cs->indirect_info->offset = offset;
2177 cs->indirect_info->agg_contents = 1;
2178 cs->indirect_info->by_ref = by_ref;
2179 return;
2180 }
2181
2182 /* Now we need to try to match the complex pattern of calling a member
2183 pointer. */
2184 if (gimple_code (def) != GIMPLE_PHI
2185 || gimple_phi_num_args (def) != 2
2186 || !POINTER_TYPE_P (TREE_TYPE (target))
2187 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2188 return;
2189
2190 /* First, we need to check whether one of these is a load from a member
2191 pointer that is a parameter to this function. */
2192 tree n1 = PHI_ARG_DEF (def, 0);
2193 tree n2 = PHI_ARG_DEF (def, 1);
2194 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2195 return;
2196 gimple d1 = SSA_NAME_DEF_STMT (n1);
2197 gimple d2 = SSA_NAME_DEF_STMT (n2);
2198
2199 tree rec;
2200 basic_block bb, virt_bb;
2201 basic_block join = gimple_bb (def);
2202 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2203 {
2204 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2205 return;
2206
2207 bb = EDGE_PRED (join, 0)->src;
2208 virt_bb = gimple_bb (d2);
2209 }
2210 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2211 {
2212 bb = EDGE_PRED (join, 1)->src;
2213 virt_bb = gimple_bb (d1);
2214 }
2215 else
2216 return;
2217
2218 /* Second, we need to check that the basic blocks are laid out in the way
2219 corresponding to the pattern. */
2220
2221 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2222 || single_pred (virt_bb) != bb
2223 || single_succ (virt_bb) != join)
2224 return;
2225
2226 /* Third, let's see that the branching is done depending on the least
2227 significant bit of the pfn. */
2228
2229 gimple branch = last_stmt (bb);
2230 if (!branch || gimple_code (branch) != GIMPLE_COND)
2231 return;
2232
2233 if ((gimple_cond_code (branch) != NE_EXPR
2234 && gimple_cond_code (branch) != EQ_EXPR)
2235 || !integer_zerop (gimple_cond_rhs (branch)))
2236 return;
2237
2238 tree cond = gimple_cond_lhs (branch);
2239 if (!ipa_is_ssa_with_stmt_def (cond))
2240 return;
2241
2242 def = SSA_NAME_DEF_STMT (cond);
2243 if (!is_gimple_assign (def)
2244 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2245 || !integer_onep (gimple_assign_rhs2 (def)))
2246 return;
2247
2248 cond = gimple_assign_rhs1 (def);
2249 if (!ipa_is_ssa_with_stmt_def (cond))
2250 return;
2251
2252 def = SSA_NAME_DEF_STMT (cond);
2253
2254 if (is_gimple_assign (def)
2255 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2256 {
2257 cond = gimple_assign_rhs1 (def);
2258 if (!ipa_is_ssa_with_stmt_def (cond))
2259 return;
2260 def = SSA_NAME_DEF_STMT (cond);
2261 }
2262
2263 tree rec2;
2264 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2265 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2266 == ptrmemfunc_vbit_in_delta),
2267 NULL);
2268 if (rec != rec2)
2269 return;
2270
2271 index = ipa_get_param_decl_index (info, rec);
2272 if (index >= 0
2273 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2274 {
2275 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2276 cs->indirect_info->offset = offset;
2277 cs->indirect_info->agg_contents = 1;
2278 cs->indirect_info->member_ptr = 1;
2279 }
2280
2281 return;
2282 }
2283
2284 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2285 object referenced in the expression is a formal parameter of the caller
2286 FBI->node (described by FBI->info), create a call note for the
2287 statement. */
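
/* An illustrative example (not from the original sources): for a C++ caller

     struct A { virtual void foo (); };
     void caller (A *a)
     {
       a->foo ();
     }

   the OBJ_TYPE_REF object is the default definition of parameter A, so an
   indirect call note referring to that parameter is created and the edge is
   marked polymorphic, with the OBJ_TYPE_REF token and class recorded in its
   indirect_info.  */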
2288
2289 static void
2290 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2291 gimple call, tree target)
2292 {
2293 tree obj = OBJ_TYPE_REF_OBJECT (target);
2294 int index;
2295 HOST_WIDE_INT anc_offset;
2296
2297 if (!flag_devirtualize)
2298 return;
2299
2300 if (TREE_CODE (obj) != SSA_NAME)
2301 return;
2302
2303 struct ipa_node_params *info = fbi->info;
2304 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2305 {
2306 struct ipa_jump_func jfunc;
2307 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2308 return;
2309
2310 anc_offset = 0;
2311 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2312 gcc_assert (index >= 0);
2313 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2314 call, &jfunc))
2315 return;
2316 }
2317 else
2318 {
2319 struct ipa_jump_func jfunc;
2320 gimple stmt = SSA_NAME_DEF_STMT (obj);
2321 tree expr;
2322
2323 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2324 if (!expr)
2325 return;
2326 index = ipa_get_param_decl_index (info,
2327 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2328 gcc_assert (index >= 0);
2329 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2330 call, &jfunc, anc_offset))
2331 return;
2332 }
2333
2334 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2335 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2336 ii->offset = anc_offset;
2337 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2338 ii->otr_type = obj_type_ref_class (target);
2339 ii->polymorphic = 1;
2340 }
2341
2342 /* Analyze a call statement CALL, determining whether and how it utilizes
2343    formal parameters of the caller, which are described by the intermediate
2344    information in FBI.  */
2345
2346 static void
2347 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2348 {
2349 tree target = gimple_call_fn (call);
2350
2351 if (!target
2352 || (TREE_CODE (target) != SSA_NAME
2353 && !virtual_method_call_p (target)))
2354 return;
2355
2356 struct cgraph_edge *cs = fbi->node->get_edge (call);
2357 /* If we previously turned the call into a direct call, there is
2358 no need to analyze. */
2359 if (cs && !cs->indirect_unknown_callee)
2360 return;
2361
2362 if (cs->indirect_info->polymorphic)
2363 {
2364 tree instance;
2365 tree target = gimple_call_fn (call);
2366 ipa_polymorphic_call_context context (current_function_decl,
2367 target, call, &instance);
2368
2369 gcc_checking_assert (cs->indirect_info->otr_type
2370 == obj_type_ref_class (target));
2371 gcc_checking_assert (cs->indirect_info->otr_token
2372 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2373
2374 if (context.get_dynamic_type (instance,
2375 OBJ_TYPE_REF_OBJECT (target),
2376 obj_type_ref_class (target), call))
2377 cs->indirect_info->context = context;
2378 }
2379
2380 if (TREE_CODE (target) == SSA_NAME)
2381 ipa_analyze_indirect_call_uses (fbi, call, target);
2382 else if (virtual_method_call_p (target))
2383 ipa_analyze_virtual_call_uses (fbi, call, target);
2384 }
2385
2386
2387 /* Analyze the statement STMT with respect to formal parameters (described in
2388    FBI->info) of the caller given by FBI->node.  Currently it only checks
2389    whether formal parameters are called.  */
2390
2391 static void
2392 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2393 {
2394 if (is_gimple_call (stmt))
2395 ipa_analyze_call_uses (fbi, stmt);
2396 }
2397
2398 /* Callback of walk_stmt_load_store_addr_ops, used for visit_load, visit_store
2399    and visit_addr alike.  If OP is a parameter declaration, mark it as used in
2400    the ipa_node_params structure passed in DATA.  */
2401
2402 static bool
2403 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2404 {
2405 struct ipa_node_params *info = (struct ipa_node_params *) data;
2406
2407 op = get_base_address (op);
2408 if (op
2409 && TREE_CODE (op) == PARM_DECL)
2410 {
2411 int index = ipa_get_param_decl_index (info, op);
2412 gcc_assert (index >= 0);
2413 ipa_set_param_used (info, index, true);
2414 }
2415
2416 return false;
2417 }
2418
2419 /* Scan the statements in BB and inspect the uses of formal parameters.  Store
2420    the findings in various fields of the associated ipa_node_params structure,
2421    such as parameter flags, notes etc.  FBI holds various data about
2422    the function being analyzed.  */
2423
2424 static void
2425 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2426 {
2427 gimple_stmt_iterator gsi;
2428 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2429 {
2430 gimple stmt = gsi_stmt (gsi);
2431
2432 if (is_gimple_debug (stmt))
2433 continue;
2434
2435 ipa_analyze_stmt_uses (fbi, stmt);
2436 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2437 visit_ref_for_mod_analysis,
2438 visit_ref_for_mod_analysis,
2439 visit_ref_for_mod_analysis);
2440 }
2441 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2442 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2443 visit_ref_for_mod_analysis,
2444 visit_ref_for_mod_analysis,
2445 visit_ref_for_mod_analysis);
2446 }
2447
2448 /* Calculate controlled uses of parameters of NODE. */
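
/* For instance (an illustrative sketch, not from the original sources), in

     extern void helper (void (*) (void));

     static void wrap (void (*cb) (void))
     {
       cb ();
       helper (cb);
     }

   every use of CB appears in a call statement, so its controlled-uses count
   comes out as 2, whereas storing CB into a global variable would force the
   count to IPA_UNDESCRIBED_USE.  */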
2449
2450 static void
2451 ipa_analyze_controlled_uses (struct cgraph_node *node)
2452 {
2453 struct ipa_node_params *info = IPA_NODE_REF (node);
2454
2455 for (int i = 0; i < ipa_get_param_count (info); i++)
2456 {
2457 tree parm = ipa_get_param (info, i);
2458 int controlled_uses = 0;
2459
2460 /* For SSA regs see if parameter is used. For non-SSA we compute
2461 the flag during modification analysis. */
2462 if (is_gimple_reg (parm))
2463 {
2464 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2465 parm);
2466 if (ddef && !has_zero_uses (ddef))
2467 {
2468 imm_use_iterator imm_iter;
2469 use_operand_p use_p;
2470
2471 ipa_set_param_used (info, i, true);
2472 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2473 if (!is_gimple_call (USE_STMT (use_p)))
2474 {
2475 if (!is_gimple_debug (USE_STMT (use_p)))
2476 {
2477 controlled_uses = IPA_UNDESCRIBED_USE;
2478 break;
2479 }
2480 }
2481 else
2482 controlled_uses++;
2483 }
2484 else
2485 controlled_uses = 0;
2486 }
2487 else
2488 controlled_uses = IPA_UNDESCRIBED_USE;
2489 ipa_set_controlled_uses (info, i, controlled_uses);
2490 }
2491 }
2492
2493 /* Free stuff in BI. */
2494
2495 static void
2496 free_ipa_bb_info (struct ipa_bb_info *bi)
2497 {
2498 bi->cg_edges.release ();
2499 bi->param_aa_statuses.release ();
2500 }
2501
2502 /* Dominator walker driving the analysis. */
2503
2504 class analysis_dom_walker : public dom_walker
2505 {
2506 public:
2507 analysis_dom_walker (struct func_body_info *fbi)
2508 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2509
2510 virtual void before_dom_children (basic_block);
2511
2512 private:
2513 struct func_body_info *m_fbi;
2514 };
2515
2516 void
2517 analysis_dom_walker::before_dom_children (basic_block bb)
2518 {
2519 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2520 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2521 }
2522
2523 /* Initialize the array describing properties of formal parameters
2524 of NODE, analyze their uses and compute jump functions associated
2525 with actual arguments of calls from within NODE. */
2526
2527 void
2528 ipa_analyze_node (struct cgraph_node *node)
2529 {
2530 struct func_body_info fbi;
2531 struct ipa_node_params *info;
2532
2533 ipa_check_create_node_params ();
2534 ipa_check_create_edge_args ();
2535 info = IPA_NODE_REF (node);
2536
2537 if (info->analysis_done)
2538 return;
2539 info->analysis_done = 1;
2540
2541 if (ipa_func_spec_opts_forbid_analysis_p (node))
2542 {
2543 for (int i = 0; i < ipa_get_param_count (info); i++)
2544 {
2545 ipa_set_param_used (info, i, true);
2546 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2547 }
2548 return;
2549 }
2550
2551 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2552 push_cfun (func);
2553 calculate_dominance_info (CDI_DOMINATORS);
2554 ipa_initialize_node_params (node);
2555 ipa_analyze_controlled_uses (node);
2556
2557 fbi.node = node;
2558 fbi.info = IPA_NODE_REF (node);
2559 fbi.bb_infos = vNULL;
2560 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2561 fbi.param_count = ipa_get_param_count (info);
2562 fbi.aa_walked = 0;
2563
2564 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2565 {
2566 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2567 bi->cg_edges.safe_push (cs);
2568 }
2569
2570 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2571 {
2572 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2573 bi->cg_edges.safe_push (cs);
2574 }
2575
2576 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2577
2578 int i;
2579 struct ipa_bb_info *bi;
2580 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2581 free_ipa_bb_info (bi);
2582 fbi.bb_infos.release ();
2583 free_dominance_info (CDI_DOMINATORS);
2584 pop_cfun ();
2585 }
2586
2587 /* Update the jump function DST when the call graph edge corresponding to SRC
2588    is being inlined, knowing that DST is of type ancestor and SRC of known
2589    type.  */
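
/* For instance (illustrative numbers only): if SRC says the argument is known
   to be of base type B at offset 0 and DST is an ancestor jump function that
   adds offset 64 and type D, then, provided the ancestor preserves the type,
   the combined jump function describes a known type with base type B at
   offset 64 and component type D.  */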
2590
2591 static void
2592 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2593 struct ipa_jump_func *dst)
2594 {
2595 HOST_WIDE_INT combined_offset;
2596 tree combined_type;
2597
2598 if (!ipa_get_jf_ancestor_type_preserved (dst))
2599 {
2600 dst->type = IPA_JF_UNKNOWN;
2601 return;
2602 }
2603
2604 combined_offset = ipa_get_jf_known_type_offset (src)
2605 + ipa_get_jf_ancestor_offset (dst);
2606 combined_type = ipa_get_jf_ancestor_type (dst);
2607
2608 ipa_set_jf_known_type (dst, combined_offset,
2609 ipa_get_jf_known_type_base_type (src),
2610 combined_type);
2611 }
2612
2613 /* Update the jump functions associated with call graph edge E when the call
2614 graph edge CS is being inlined, assuming that E->caller is already (possibly
2615 indirectly) inlined into CS->callee and that E has not been inlined. */
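
/* As a sketch of the combination performed below: if E passes its third
   argument as a simple pass-through of parameter 1 of the function being
   inlined, and CS passes a constant as the corresponding second argument,
   then after inlining the third argument of E is described by a copy of that
   constant jump function.  */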
2616
2617 static void
2618 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2619 struct cgraph_edge *e)
2620 {
2621 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2622 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2623 int count = ipa_get_cs_argument_count (args);
2624 int i;
2625
2626 for (i = 0; i < count; i++)
2627 {
2628 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2629 struct ipa_polymorphic_call_context *dst_ctx
2630 = ipa_get_ith_polymorhic_call_context (args, i);
2631
2632 if (dst->type == IPA_JF_ANCESTOR)
2633 {
2634 struct ipa_jump_func *src;
2635 int dst_fid = dst->value.ancestor.formal_id;
2636 struct ipa_polymorphic_call_context *src_ctx
2637 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2638
2639 	  /* A variable number of arguments can cause havoc if we try to access
2640 	     an argument that does not exist on the inlined edge.  So make sure we
2641 	     don't.  */
2642 if (dst_fid >= ipa_get_cs_argument_count (top))
2643 {
2644 dst->type = IPA_JF_UNKNOWN;
2645 continue;
2646 }
2647
2648 src = ipa_get_ith_jump_func (top, dst_fid);
2649
2650 if (src_ctx && !src_ctx->useless_p ())
2651 {
2652 struct ipa_polymorphic_call_context ctx = *src_ctx;
2653
2654 /* TODO: Make type preserved safe WRT contexts. */
2655 if (!dst->value.ancestor.agg_preserved)
2656 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2657 ctx.offset_by (dst->value.ancestor.offset);
2658 if (!ctx.useless_p ())
2659 {
2660 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2661 count);
2662 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2663 }
2664 }
2665
2666 if (src->agg.items
2667 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2668 {
2669 struct ipa_agg_jf_item *item;
2670 int j;
2671
2672 /* Currently we do not produce clobber aggregate jump functions,
2673 replace with merging when we do. */
2674 gcc_assert (!dst->agg.items);
2675
2676 dst->agg.items = vec_safe_copy (src->agg.items);
2677 dst->agg.by_ref = src->agg.by_ref;
2678 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2679 item->offset -= dst->value.ancestor.offset;
2680 }
2681
2682 if (src->type == IPA_JF_KNOWN_TYPE)
2683 combine_known_type_and_ancestor_jfs (src, dst);
2684 else if (src->type == IPA_JF_PASS_THROUGH
2685 && src->value.pass_through.operation == NOP_EXPR)
2686 {
2687 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2688 dst->value.ancestor.agg_preserved &=
2689 src->value.pass_through.agg_preserved;
2690 dst->value.ancestor.type_preserved &=
2691 src->value.pass_through.type_preserved;
2692 }
2693 else if (src->type == IPA_JF_ANCESTOR)
2694 {
2695 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2696 dst->value.ancestor.offset += src->value.ancestor.offset;
2697 dst->value.ancestor.agg_preserved &=
2698 src->value.ancestor.agg_preserved;
2699 dst->value.ancestor.type_preserved &=
2700 src->value.ancestor.type_preserved;
2701 }
2702 else
2703 dst->type = IPA_JF_UNKNOWN;
2704 }
2705 else if (dst->type == IPA_JF_PASS_THROUGH)
2706 {
2707 struct ipa_jump_func *src;
2708 	  /* We must check the range because of calls with a variable number of
2709 	     arguments, and we cannot combine jump functions with operations.  */
2710 if (dst->value.pass_through.operation == NOP_EXPR
2711 && (dst->value.pass_through.formal_id
2712 < ipa_get_cs_argument_count (top)))
2713 {
2714 int dst_fid = dst->value.pass_through.formal_id;
2715 src = ipa_get_ith_jump_func (top, dst_fid);
2716 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2717 struct ipa_polymorphic_call_context *src_ctx
2718 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2719
2720 if (src_ctx && !src_ctx->useless_p ())
2721 {
2722 struct ipa_polymorphic_call_context ctx = *src_ctx;
2723
2724 /* TODO: Make type preserved safe WRT contexts. */
2725 if (!dst->value.ancestor.agg_preserved)
2726 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2727 if (!ctx.useless_p ())
2728 {
2729 if (!dst_ctx)
2730 {
2731 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2732 count);
2733 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2734 }
2735 dst_ctx->combine_with (ctx);
2736 }
2737 }
2738 switch (src->type)
2739 {
2740 case IPA_JF_UNKNOWN:
2741 dst->type = IPA_JF_UNKNOWN;
2742 break;
2743 case IPA_JF_KNOWN_TYPE:
2744 if (ipa_get_jf_pass_through_type_preserved (dst))
2745 ipa_set_jf_known_type (dst,
2746 ipa_get_jf_known_type_offset (src),
2747 ipa_get_jf_known_type_base_type (src),
2748 ipa_get_jf_known_type_component_type (src));
2749 else
2750 dst->type = IPA_JF_UNKNOWN;
2751 break;
2752 case IPA_JF_CONST:
2753 ipa_set_jf_cst_copy (dst, src);
2754 break;
2755
2756 case IPA_JF_PASS_THROUGH:
2757 {
2758 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2759 enum tree_code operation;
2760 operation = ipa_get_jf_pass_through_operation (src);
2761
2762 if (operation == NOP_EXPR)
2763 {
2764 bool agg_p, type_p;
2765 agg_p = dst_agg_p
2766 && ipa_get_jf_pass_through_agg_preserved (src);
2767 type_p = ipa_get_jf_pass_through_type_preserved (src)
2768 && ipa_get_jf_pass_through_type_preserved (dst);
2769 ipa_set_jf_simple_pass_through (dst, formal_id,
2770 agg_p, type_p);
2771 }
2772 else
2773 {
2774 tree operand = ipa_get_jf_pass_through_operand (src);
2775 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2776 operation);
2777 }
2778 break;
2779 }
2780 case IPA_JF_ANCESTOR:
2781 {
2782 bool agg_p, type_p;
2783 agg_p = dst_agg_p
2784 && ipa_get_jf_ancestor_agg_preserved (src);
2785 type_p = ipa_get_jf_ancestor_type_preserved (src)
2786 && ipa_get_jf_pass_through_type_preserved (dst);
2787 ipa_set_ancestor_jf (dst,
2788 ipa_get_jf_ancestor_offset (src),
2789 ipa_get_jf_ancestor_type (src),
2790 ipa_get_jf_ancestor_formal_id (src),
2791 agg_p, type_p);
2792 break;
2793 }
2794 default:
2795 gcc_unreachable ();
2796 }
2797
2798 if (src->agg.items
2799 && (dst_agg_p || !src->agg.by_ref))
2800 {
2801 /* Currently we do not produce clobber aggregate jump
2802 functions, replace with merging when we do. */
2803 gcc_assert (!dst->agg.items);
2804
2805 dst->agg.by_ref = src->agg.by_ref;
2806 dst->agg.items = vec_safe_copy (src->agg.items);
2807 }
2808 }
2809 else
2810 dst->type = IPA_JF_UNKNOWN;
2811 }
2812 }
2813 }
2814
2815 /* If TARGET is an addr_expr of a function declaration, make it the
2816    destination of an indirect edge IE (a speculative one if SPECULATIVE is
2817    true) and return the edge.  Otherwise, return NULL.  */
2818
2819 struct cgraph_edge *
2820 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2821 bool speculative)
2822 {
2823 struct cgraph_node *callee;
2824 struct inline_edge_summary *es = inline_edge_summary (ie);
2825 bool unreachable = false;
2826
2827 if (TREE_CODE (target) == ADDR_EXPR)
2828 target = TREE_OPERAND (target, 0);
2829 if (TREE_CODE (target) != FUNCTION_DECL)
2830 {
2831 target = canonicalize_constructor_val (target, NULL);
2832 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2833 {
2834 if (ie->indirect_info->member_ptr)
2835 /* Member pointer call that goes through a VMT lookup. */
2836 return NULL;
2837
2838 if (dump_enabled_p ())
2839 {
2840 location_t loc = gimple_location_safe (ie->call_stmt);
2841 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2842 "discovered direct call to non-function in %s/%i, "
2843 "making it __builtin_unreachable\n",
2844 ie->caller->name (), ie->caller->order);
2845 }
2846
2847 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2848 callee = cgraph_node::get_create (target);
2849 unreachable = true;
2850 }
2851 else
2852 callee = cgraph_node::get (target);
2853 }
2854 else
2855 callee = cgraph_node::get (target);
2856
2857   /* Because may-edges are not explicitly represented and the vtable may be
2858      external, we may create the first reference to the object in the unit.  */
2859 if (!callee || callee->global.inlined_to)
2860 {
2861
2862       /* We had better make sure we can refer to it.
2863 	 In the case of static functions we are out of luck, since we have already
2864 	 removed their bodies.  In the case of public functions we may or may
2865 	 not introduce the reference.  */
2866 if (!canonicalize_constructor_val (target, NULL)
2867 || !TREE_PUBLIC (target))
2868 {
2869 if (dump_file)
2870 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2871 		 "(%s/%i -> %s/%i) but cannot refer to it.  Giving up.\n",
2872 xstrdup (ie->caller->name ()),
2873 ie->caller->order,
2874 xstrdup (ie->callee->name ()),
2875 ie->callee->order);
2876 return NULL;
2877 }
2878 callee = cgraph_node::get_create (target);
2879 }
2880
2881 if (!dbg_cnt (devirt))
2882 return NULL;
2883
2884 ipa_check_create_node_params ();
2885
2886   /* We cannot make edges to inline clones.  It is a bug if someone removed
2887      the cgraph node too early.  */
2888 gcc_assert (!callee->global.inlined_to);
2889
2890 if (dump_file && !unreachable)
2891 {
2892 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2893 "(%s/%i -> %s/%i), for stmt ",
2894 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2895 speculative ? "speculative" : "known",
2896 xstrdup (ie->caller->name ()),
2897 ie->caller->order,
2898 xstrdup (callee->name ()),
2899 callee->order);
2900 if (ie->call_stmt)
2901 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2902 else
2903 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2904 }
2905 if (dump_enabled_p ())
2906 {
2907 location_t loc = gimple_location_safe (ie->call_stmt);
2908
2909 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2910 "converting indirect call in %s to direct call to %s\n",
2911 ie->caller->name (), callee->name ());
2912 }
2913 if (!speculative)
2914 ie = ie->make_direct (callee);
2915 else
2916 {
2917 if (!callee->can_be_discarded_p ())
2918 {
2919 cgraph_node *alias;
2920 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2921 if (alias)
2922 callee = alias;
2923 }
2924 ie = ie->make_speculative
2925 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2926 }
2927 es = inline_edge_summary (ie);
2928 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2929 - eni_size_weights.call_cost);
2930 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2931 - eni_time_weights.call_cost);
2932
2933 return ie;
2934 }
2935
2936 /* Retrieve a value from aggregate jump function AGG for the given OFFSET, or
2937    return NULL if there is none.  BY_REF specifies whether the value has to be
2938    passed by reference or by value.  */
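
/* For example (illustrative values): if AGG describes an aggregate passed by
   reference with items {offset 0, value 1} and {offset 32, value 2}, a query
   for offset 32 with BY_REF set returns the constant 2, while a query for
   offset 16, or one with BY_REF unset, returns NULL.  */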
2939
2940 tree
2941 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2942 HOST_WIDE_INT offset, bool by_ref)
2943 {
2944 struct ipa_agg_jf_item *item;
2945 int i;
2946
2947 if (by_ref != agg->by_ref)
2948 return NULL;
2949
2950 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2951 if (item->offset == offset)
2952 {
2953 /* Currently we do not have clobber values, return NULL for them once
2954 we do. */
2955 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2956 return item->value;
2957 }
2958 return NULL;
2959 }
2960
2961 /* Remove a reference to SYMBOL from the list of references of a node given by
2962 reference description RDESC. Return true if the reference has been
2963 successfully found and removed. */
2964
2965 static bool
2966 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2967 {
2968 struct ipa_ref *to_del;
2969 struct cgraph_edge *origin;
2970
2971 origin = rdesc->cs;
2972 if (!origin)
2973 return false;
2974 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2975 origin->lto_stmt_uid);
2976 if (!to_del)
2977 return false;
2978
2979 to_del->remove_reference ();
2980 if (dump_file)
2981 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2982 xstrdup (origin->caller->name ()),
2983 origin->caller->order, xstrdup (symbol->name ()));
2984 return true;
2985 }
2986
2987 /* If JFUNC has a reference description with refcount different from
2988 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2989 NULL. JFUNC must be a constant jump function. */
2990
2991 static struct ipa_cst_ref_desc *
2992 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2993 {
2994 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2995 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2996 return rdesc;
2997 else
2998 return NULL;
2999 }
3000
3001 /* If the value of constant jump function JFUNC is an address of a function
3002 declaration, return the associated call graph node. Otherwise return
3003 NULL. */
3004
3005 static cgraph_node *
3006 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3007 {
3008 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3009 tree cst = ipa_get_jf_constant (jfunc);
3010 if (TREE_CODE (cst) != ADDR_EXPR
3011 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3012 return NULL;
3013
3014 return cgraph_node::get (TREE_OPERAND (cst, 0));
3015 }
3016
3017
3018 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3019 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3020 the edge specified in the rdesc. Return false if either the symbol or the
3021 reference could not be found, otherwise return true. */
3022
3023 static bool
3024 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3025 {
3026 struct ipa_cst_ref_desc *rdesc;
3027 if (jfunc->type == IPA_JF_CONST
3028 && (rdesc = jfunc_rdesc_usable (jfunc))
3029 && --rdesc->refcount == 0)
3030 {
3031 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3032 if (!symbol)
3033 return false;
3034
3035 return remove_described_reference (symbol, rdesc);
3036 }
3037 return true;
3038 }
3039
3040 /* Try to find a destination for indirect edge IE that corresponds to a simple
3041 call or a call of a member function pointer and where the destination is a
3042 pointer formal parameter described by jump function JFUNC. If it can be
3043 determined, return the newly direct edge, otherwise return NULL.
3044 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3045
3046 static struct cgraph_edge *
3047 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3048 struct ipa_jump_func *jfunc,
3049 struct ipa_node_params *new_root_info)
3050 {
3051 struct cgraph_edge *cs;
3052 tree target;
3053 bool agg_contents = ie->indirect_info->agg_contents;
3054
3055 if (ie->indirect_info->agg_contents)
3056 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3057 ie->indirect_info->offset,
3058 ie->indirect_info->by_ref);
3059 else
3060 target = ipa_value_from_jfunc (new_root_info, jfunc);
3061 if (!target)
3062 return NULL;
3063 cs = ipa_make_edge_direct_to_target (ie, target);
3064
3065 if (cs && !agg_contents)
3066 {
3067 bool ok;
3068 gcc_checking_assert (cs->callee
3069 && (cs != ie
3070 || jfunc->type != IPA_JF_CONST
3071 || !cgraph_node_for_jfunc (jfunc)
3072 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3073 ok = try_decrement_rdesc_refcount (jfunc);
3074 gcc_checking_assert (ok);
3075 }
3076
3077 return cs;
3078 }
3079
3080 /* Return the target to be used in cases of impossible devirtualization. IE
3081 and target (the latter can be NULL) are dumped when dumping is enabled. */
3082
3083 tree
3084 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3085 {
3086 if (dump_file)
3087 {
3088 if (target)
3089 fprintf (dump_file,
3090 "Type inconsistent devirtualization: %s/%i->%s\n",
3091 ie->caller->name (), ie->caller->order,
3092 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3093 else
3094 fprintf (dump_file,
3095 "No devirtualization target in %s/%i\n",
3096 ie->caller->name (), ie->caller->order);
3097 }
3098 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3099 cgraph_node::get_create (new_target);
3100 return new_target;
3101 }
3102
3103 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3104 call based on a formal parameter which is described by jump function JFUNC
3105 and if it can be determined, make it direct and return the direct edge.
3106 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3107 are relative to. */
3108
3109 static struct cgraph_edge *
3110 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3111 struct ipa_jump_func *jfunc,
3112 struct ipa_node_params *new_root_info,
3113 struct ipa_polymorphic_call_context *ctx_ptr)
3114 {
3115 tree binfo, target = NULL;
3116 bool speculative = false;
3117 bool updated = false;
3118
3119 if (!flag_devirtualize)
3120 return NULL;
3121
3122   /* If this is a call of a function parameter, restrict its type
3123      based on knowledge of the context.  */
3124 if (ctx_ptr && !ie->indirect_info->by_ref)
3125 {
3126 struct ipa_polymorphic_call_context ctx = *ctx_ptr;
3127
3128 ctx.offset_by (ie->indirect_info->offset);
3129
3130       /* TODO: We want to record whether a type change happens.
3131 	 The old code did not do that, which seems like a bug.  */
3132 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3133 ie->indirect_info->otr_type);
3134
3135 updated = ie->indirect_info->context.combine_with
3136 (ctx, ie->indirect_info->otr_type);
3137 }
3138
3139 /* Try to do lookup via known virtual table pointer value. */
3140 if (!ie->indirect_info->by_ref)
3141 {
3142 tree vtable;
3143 unsigned HOST_WIDE_INT offset;
3144 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3145 ie->indirect_info->offset,
3146 true);
3147 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3148 {
3149 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3150 vtable, offset);
3151 if (target)
3152 {
3153 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
3154 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
3155 || !possible_polymorphic_call_target_p
3156 (ie, cgraph_node::get (target)))
3157 target = ipa_impossible_devirt_target (ie, target);
3158 return ipa_make_edge_direct_to_target (ie, target);
3159 }
3160 }
3161 }
3162
3163 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3164
3165 if (binfo && TREE_CODE (binfo) != TREE_BINFO)
3166 {
3167 struct ipa_polymorphic_call_context ctx (binfo,
3168 ie->indirect_info->otr_type,
3169 ie->indirect_info->offset);
3170 updated |= ie->indirect_info->context.combine_with
3171 (ctx, ie->indirect_info->otr_type);
3172 }
3173
3174 if (updated)
3175 {
3176 ipa_polymorphic_call_context context (ie);
3177 vec <cgraph_node *>targets;
3178 bool final;
3179
3180 targets = possible_polymorphic_call_targets
3181 (ie->indirect_info->otr_type,
3182 ie->indirect_info->otr_token,
3183 context, &final);
3184 if (final && targets.length () <= 1)
3185 {
3186 if (targets.length () == 1)
3187 target = targets[0]->decl;
3188 else
3189 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3190 }
3191 else if (flag_devirtualize_speculatively
3192 && !ie->speculative && ie->maybe_hot_p ())
3193 {
3194 cgraph_node *n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3195 ie->indirect_info->otr_token,
3196 ie->indirect_info->context);
3197 if (n)
3198 {
3199 target = n->decl;
3200 speculative = true;
3201 }
3202 }
3203 }
3204
3205 if (binfo && TREE_CODE (binfo) == TREE_BINFO)
3206 {
3207 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3208 ie->indirect_info->otr_type);
3209 if (binfo)
3210 {
3211 tree t = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3212 binfo);
3213 if (t)
3214 {
3215 gcc_assert (!target || speculative || target == t);
3216 target = t;
3217 speculative = false;
3218 }
3219 }
3220 }
3221
3222 if (target)
3223 {
3224 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get_create (target)))
3225 target = ipa_impossible_devirt_target (ie, target);
3226 return ipa_make_edge_direct_to_target (ie, target, speculative);
3227 }
3228 else
3229 return NULL;
3230 }
3231
3232 /* Update the param called notes associated with NODE when CS is being inlined,
3233 assuming NODE is (potentially indirectly) inlined into CS->callee.
3234 Moreover, if the callee is discovered to be constant, create a new cgraph
3235 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3236    unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
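
/* For instance (an illustrative sketch): if an indirect call in NODE goes
   through NODE's parameter 2 and the jump function of CS for the
   corresponding argument is a simple pass-through of the caller's parameter
   0, the note is redirected to parameter 0 of the new root, provided the
   relevant preservation flags allow it; if that jump function is instead a
   constant function address, the call is made direct.  */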
3237
3238 static bool
3239 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3240 struct cgraph_node *node,
3241 vec<cgraph_edge *> *new_edges)
3242 {
3243 struct ipa_edge_args *top;
3244 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3245 struct ipa_node_params *new_root_info;
3246 bool res = false;
3247
3248 ipa_check_create_edge_args ();
3249 top = IPA_EDGE_REF (cs);
3250 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3251 ? cs->caller->global.inlined_to
3252 : cs->caller);
3253
3254 for (ie = node->indirect_calls; ie; ie = next_ie)
3255 {
3256 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3257 struct ipa_jump_func *jfunc;
3258 int param_index;
3259
3260 next_ie = ie->next_callee;
3261
3262 if (ici->param_index == -1)
3263 continue;
3264
3265       /* We must check the range because of calls with a variable number of
	 arguments:  */
3266 if (ici->param_index >= ipa_get_cs_argument_count (top))
3267 {
3268 ici->param_index = -1;
3269 continue;
3270 }
3271
3272 param_index = ici->param_index;
3273 jfunc = ipa_get_ith_jump_func (top, param_index);
3274
3275 if (!flag_indirect_inlining)
3276 new_direct_edge = NULL;
3277 else if (ici->polymorphic)
3278 {
3279 ipa_polymorphic_call_context *ctx;
3280 ctx = ipa_get_ith_polymorhic_call_context (top, param_index);
3281 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3282 new_root_info,
3283 ctx);
3284 }
3285 else
3286 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3287 new_root_info);
3288 /* If speculation was removed, then we need to do nothing. */
3289 if (new_direct_edge && new_direct_edge != ie)
3290 {
3291 new_direct_edge->indirect_inlining_edge = 1;
3292 top = IPA_EDGE_REF (cs);
3293 res = true;
3294 }
3295 else if (new_direct_edge)
3296 {
3297 new_direct_edge->indirect_inlining_edge = 1;
3298 if (new_direct_edge->call_stmt)
3299 new_direct_edge->call_stmt_cannot_inline_p
3300 = !gimple_check_call_matching_types (
3301 new_direct_edge->call_stmt,
3302 new_direct_edge->callee->decl, false);
3303 if (new_edges)
3304 {
3305 new_edges->safe_push (new_direct_edge);
3306 res = true;
3307 }
3308 top = IPA_EDGE_REF (cs);
3309 }
3310 else if (jfunc->type == IPA_JF_PASS_THROUGH
3311 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3312 {
3313 if ((ici->agg_contents
3314 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3315 || (ici->polymorphic
3316 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3317 ici->param_index = -1;
3318 else
3319 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3320 }
3321 else if (jfunc->type == IPA_JF_ANCESTOR)
3322 {
3323 if ((ici->agg_contents
3324 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3325 || (ici->polymorphic
3326 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3327 ici->param_index = -1;
3328 else
3329 {
3330 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3331 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3332 }
3333 }
3334 else
3335 /* Either we can find a destination for this edge now or never. */
3336 ici->param_index = -1;
3337 }
3338
3339 return res;
3340 }
3341
3342 /* Recursively traverse subtree of NODE (including node) made of inlined
3343 cgraph_edges when CS has been inlined and invoke
3344 update_indirect_edges_after_inlining on all nodes and
3345 update_jump_functions_after_inlining on all non-inlined edges that lead out
3346 of this subtree. Newly discovered indirect edges will be added to
3347    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3348    created.  */
3349
3350 static bool
3351 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3352 struct cgraph_node *node,
3353 vec<cgraph_edge *> *new_edges)
3354 {
3355 struct cgraph_edge *e;
3356 bool res;
3357
3358 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3359
3360 for (e = node->callees; e; e = e->next_callee)
3361 if (!e->inline_failed)
3362 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3363 else
3364 update_jump_functions_after_inlining (cs, e);
3365 for (e = node->indirect_calls; e; e = e->next_callee)
3366 update_jump_functions_after_inlining (cs, e);
3367
3368 return res;
3369 }
3370
3371 /* Combine two controlled uses counts as done during inlining. */
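
/* The sum is decremented by one because the call argument connecting the two
   counts is itself one of the controlled uses and disappears with the
   inlining.  For instance, combining counts of 2 and 3 yields 4, and any
   IPA_UNDESCRIBED_USE operand makes the result IPA_UNDESCRIBED_USE.  */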
3372
3373 static int
3374 combine_controlled_uses_counters (int c, int d)
3375 {
3376 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3377 return IPA_UNDESCRIBED_USE;
3378 else
3379 return c + d - 1;
3380 }
3381
3382 /* Propagate the number of controlled users from CS->callee to the new root
3383    of the tree of inlined nodes.  */
3384
3385 static void
3386 propagate_controlled_uses (struct cgraph_edge *cs)
3387 {
3388 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3389 struct cgraph_node *new_root = cs->caller->global.inlined_to
3390 ? cs->caller->global.inlined_to : cs->caller;
3391 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3392 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3393 int count, i;
3394
3395 count = MIN (ipa_get_cs_argument_count (args),
3396 ipa_get_param_count (old_root_info));
3397 for (i = 0; i < count; i++)
3398 {
3399 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3400 struct ipa_cst_ref_desc *rdesc;
3401
3402 if (jf->type == IPA_JF_PASS_THROUGH)
3403 {
3404 int src_idx, c, d;
3405 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3406 c = ipa_get_controlled_uses (new_root_info, src_idx);
3407 d = ipa_get_controlled_uses (old_root_info, i);
3408
3409 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3410 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3411 c = combine_controlled_uses_counters (c, d);
3412 ipa_set_controlled_uses (new_root_info, src_idx, c);
3413 if (c == 0 && new_root_info->ipcp_orig_node)
3414 {
3415 struct cgraph_node *n;
3416 struct ipa_ref *ref;
3417 tree t = new_root_info->known_vals[src_idx];
3418
3419 if (t && TREE_CODE (t) == ADDR_EXPR
3420 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3421 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3422 && (ref = new_root->find_reference (n, NULL, 0)))
3423 {
3424 if (dump_file)
3425 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3426 "reference from %s/%i to %s/%i.\n",
3427 xstrdup (new_root->name ()),
3428 new_root->order,
3429 xstrdup (n->name ()), n->order);
3430 ref->remove_reference ();
3431 }
3432 }
3433 }
3434 else if (jf->type == IPA_JF_CONST
3435 && (rdesc = jfunc_rdesc_usable (jf)))
3436 {
3437 int d = ipa_get_controlled_uses (old_root_info, i);
3438 int c = rdesc->refcount;
3439 rdesc->refcount = combine_controlled_uses_counters (c, d);
3440 if (rdesc->refcount == 0)
3441 {
3442 tree cst = ipa_get_jf_constant (jf);
3443 struct cgraph_node *n;
3444 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3445 && TREE_CODE (TREE_OPERAND (cst, 0))
3446 == FUNCTION_DECL);
3447 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3448 if (n)
3449 {
3450 struct cgraph_node *clone;
3451 bool ok;
3452 ok = remove_described_reference (n, rdesc);
3453 gcc_checking_assert (ok);
3454
3455 clone = cs->caller;
3456 while (clone->global.inlined_to
3457 && clone != rdesc->cs->caller
3458 && IPA_NODE_REF (clone)->ipcp_orig_node)
3459 {
3460 struct ipa_ref *ref;
3461 ref = clone->find_reference (n, NULL, 0);
3462 if (ref)
3463 {
3464 if (dump_file)
3465 fprintf (dump_file, "ipa-prop: Removing "
3466 "cloning-created reference "
3467 "from %s/%i to %s/%i.\n",
3468 xstrdup (clone->name ()),
3469 clone->order,
3470 xstrdup (n->name ()),
3471 n->order);
3472 ref->remove_reference ();
3473 }
3474 clone = clone->callers->caller;
3475 }
3476 }
3477 }
3478 }
3479 }
3480
3481 for (i = ipa_get_param_count (old_root_info);
3482 i < ipa_get_cs_argument_count (args);
3483 i++)
3484 {
3485 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3486
3487 if (jf->type == IPA_JF_CONST)
3488 {
3489 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3490 if (rdesc)
3491 rdesc->refcount = IPA_UNDESCRIBED_USE;
3492 }
3493 else if (jf->type == IPA_JF_PASS_THROUGH)
3494 ipa_set_controlled_uses (new_root_info,
3495 jf->value.pass_through.formal_id,
3496 IPA_UNDESCRIBED_USE);
3497 }
3498 }
3499
3500 /* Update jump functions and call note functions on inlining the call site CS.
3501 CS is expected to lead to a node already cloned by
3502 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3503    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3504    created.  */
3505
3506 bool
3507 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3508 vec<cgraph_edge *> *new_edges)
3509 {
3510 bool changed;
3511 /* Do nothing if the preparation phase has not been carried out yet
3512 (i.e. during early inlining). */
3513 if (!ipa_node_params_vector.exists ())
3514 return false;
3515 gcc_assert (ipa_edge_args_vector);
3516
3517 propagate_controlled_uses (cs);
3518 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3519
3520 return changed;
3521 }
3522
3523 /* Frees all dynamically allocated structures that the argument info points
3524 to. */
3525
3526 void
3527 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3528 {
3529 vec_free (args->jump_functions);
3530 memset (args, 0, sizeof (*args));
3531 }
3532
3533 /* Free all ipa_edge structures. */
3534
3535 void
3536 ipa_free_all_edge_args (void)
3537 {
3538 int i;
3539 struct ipa_edge_args *args;
3540
3541 if (!ipa_edge_args_vector)
3542 return;
3543
3544 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3545 ipa_free_edge_args_substructures (args);
3546
3547 vec_free (ipa_edge_args_vector);
3548 }
3549
3550 /* Frees all dynamically allocated structures that the param info points
3551 to. */
3552
3553 void
3554 ipa_free_node_params_substructures (struct ipa_node_params *info)
3555 {
3556 info->descriptors.release ();
3557 free (info->lattices);
3558   /* Lattice values and their sources are deallocated with their allocation
3559      pool.  */
3560 info->known_vals.release ();
3561 memset (info, 0, sizeof (*info));
3562 }
3563
3564 /* Free all ipa_node_params structures. */
3565
3566 void
3567 ipa_free_all_node_params (void)
3568 {
3569 int i;
3570 struct ipa_node_params *info;
3571
3572 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3573 ipa_free_node_params_substructures (info);
3574
3575 ipa_node_params_vector.release ();
3576 }
3577
3578 /* Set the aggregate replacements of NODE to be AGGVALS. */
3579
3580 void
3581 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3582 struct ipa_agg_replacement_value *aggvals)
3583 {
3584 if (vec_safe_length (ipa_node_agg_replacements)
3585 <= (unsigned) symtab->cgraph_max_uid)
3586 vec_safe_grow_cleared (ipa_node_agg_replacements,
3587 symtab->cgraph_max_uid + 1);
3588
3589 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3590 }
3591
3592 /* Hook that is called by cgraph.c when an edge is removed. */
3593
3594 static void
3595 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3596 {
3597 struct ipa_edge_args *args;
3598
3599 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3600 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3601 return;
3602
3603 args = IPA_EDGE_REF (cs);
3604 if (args->jump_functions)
3605 {
3606 struct ipa_jump_func *jf;
3607 int i;
3608 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3609 {
3610 struct ipa_cst_ref_desc *rdesc;
3611 try_decrement_rdesc_refcount (jf);
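     /* If the removed edge is the one a reference description points to,
        clear the pointer so that it does not dangle.  */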
3612 if (jf->type == IPA_JF_CONST
3613 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3614 && rdesc->cs == cs)
3615 rdesc->cs = NULL;
3616 }
3617 }
3618
3619 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3620 }
3621
3622 /* Hook that is called by cgraph.c when a node is removed. */
3623
3624 static void
3625 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3626 {
3627   /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
3628 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3629 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3630 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3631 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3632 }
3633
3634 /* Hook that is called by cgraph.c when an edge is duplicated. */
3635
3636 static void
3637 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3638 __attribute__((unused)) void *data)
3639 {
3640 struct ipa_edge_args *old_args, *new_args;
3641 unsigned int i;
3642
3643 ipa_check_create_edge_args ();
3644
3645 old_args = IPA_EDGE_REF (src);
3646 new_args = IPA_EDGE_REF (dst);
3647
3648 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3649 if (old_args->polymorphic_call_contexts)
3650 new_args->polymorphic_call_contexts
3651 = vec_safe_copy (old_args->polymorphic_call_contexts);
3652
3653 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3654 {
3655 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3656 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3657
3658 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3659
3660 if (src_jf->type == IPA_JF_CONST)
3661 {
3662 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3663
3664 if (!src_rdesc)
3665 dst_jf->value.constant.rdesc = NULL;
3666 else if (src->caller == dst->caller)
3667 {
3668 struct ipa_ref *ref;
3669 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3670 gcc_checking_assert (n);
3671 ref = src->caller->find_reference (n, src->call_stmt,
3672 src->lto_stmt_uid);
3673 gcc_checking_assert (ref);
3674 dst->caller->clone_reference (ref, ref->stmt);
3675
3676 gcc_checking_assert (ipa_refdesc_pool);
3677 struct ipa_cst_ref_desc *dst_rdesc
3678 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3679 dst_rdesc->cs = dst;
3680 dst_rdesc->refcount = src_rdesc->refcount;
3681 dst_rdesc->next_duplicate = NULL;
3682 dst_jf->value.constant.rdesc = dst_rdesc;
3683 }
3684 else if (src_rdesc->cs == src)
3685 {
3686 struct ipa_cst_ref_desc *dst_rdesc;
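     /* The source rdesc belongs to the edge being duplicated, so create a
        fresh rdesc for the new edge and chain it into the source's list of
        duplicates.  */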
3687 gcc_checking_assert (ipa_refdesc_pool);
3688 dst_rdesc
3689 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3690 dst_rdesc->cs = dst;
3691 dst_rdesc->refcount = src_rdesc->refcount;
3692 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3693 src_rdesc->next_duplicate = dst_rdesc;
3694 dst_jf->value.constant.rdesc = dst_rdesc;
3695 }
3696 else
3697 {
3698 struct ipa_cst_ref_desc *dst_rdesc;
3699 /* This can happen during inlining, when a JFUNC can refer to a
3700 reference taken in a function up in the tree of inline clones.
3701 We need to find the duplicate that refers to our tree of
3702 inline clones. */
3703
3704 gcc_assert (dst->caller->global.inlined_to);
3705 for (dst_rdesc = src_rdesc->next_duplicate;
3706 dst_rdesc;
3707 dst_rdesc = dst_rdesc->next_duplicate)
3708 {
3709 struct cgraph_node *top;
3710 top = dst_rdesc->cs->caller->global.inlined_to
3711 ? dst_rdesc->cs->caller->global.inlined_to
3712 : dst_rdesc->cs->caller;
3713 if (dst->caller->global.inlined_to == top)
3714 break;
3715 }
3716 gcc_assert (dst_rdesc);
3717 dst_jf->value.constant.rdesc = dst_rdesc;
3718 }
3719 }
3720 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3721 && src->caller == dst->caller)
3722 {
3723 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3724 ? dst->caller->global.inlined_to : dst->caller;
3725 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3726 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3727
3728 int c = ipa_get_controlled_uses (root_info, idx);
3729 if (c != IPA_UNDESCRIBED_USE)
3730 {
3731 c++;
3732 ipa_set_controlled_uses (root_info, idx, c);
3733 }
3734 }
3735 }
3736 }
3737
3738 /* Hook that is called by cgraph.c when a node is duplicated. */
3739
3740 static void
3741 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3742 ATTRIBUTE_UNUSED void *data)
3743 {
3744 struct ipa_node_params *old_info, *new_info;
3745 struct ipa_agg_replacement_value *old_av, *new_av;
3746
3747 ipa_check_create_node_params ();
3748 old_info = IPA_NODE_REF (src);
3749 new_info = IPA_NODE_REF (dst);
3750
3751 new_info->descriptors = old_info->descriptors.copy ();
3752 new_info->lattices = NULL;
3753 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3754
3755 new_info->analysis_done = old_info->analysis_done;
3756 new_info->node_enqueued = old_info->node_enqueued;
3757
3758 old_av = ipa_get_agg_replacements_for_node (src);
3759 if (!old_av)
3760 return;
3761
3762 new_av = NULL;
3763 while (old_av)
3764 {
3765 struct ipa_agg_replacement_value *v;
3766
3767 v = ggc_alloc<ipa_agg_replacement_value> ();
3768 memcpy (v, old_av, sizeof (*v));
3769 v->next = new_av;
3770 new_av = v;
3771 old_av = old_av->next;
3772 }
3773 ipa_set_node_agg_value_chain (dst, new_av);
3774 }
3775
3776
3777 /* Analyze a function that has been newly added to the callgraph.  */
3778
3779 static void
3780 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3781 {
3782 if (node->has_gimple_body_p ())
3783 ipa_analyze_node (node);
3784 }
3785
3786 /* Register our cgraph hooks if they are not already there. */
3787
3788 void
3789 ipa_register_cgraph_hooks (void)
3790 {
3791 if (!edge_removal_hook_holder)
3792 edge_removal_hook_holder =
3793 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3794 if (!node_removal_hook_holder)
3795 node_removal_hook_holder =
3796 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3797 if (!edge_duplication_hook_holder)
3798 edge_duplication_hook_holder =
3799 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3800 if (!node_duplication_hook_holder)
3801 node_duplication_hook_holder =
3802 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3803 function_insertion_hook_holder =
3804 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3805 }
3806
3807 /* Unregister our cgraph hooks.  */
3808
3809 static void
3810 ipa_unregister_cgraph_hooks (void)
3811 {
3812 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3813 edge_removal_hook_holder = NULL;
3814 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3815 node_removal_hook_holder = NULL;
3816 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3817 edge_duplication_hook_holder = NULL;
3818 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3819 node_duplication_hook_holder = NULL;
3820 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3821 function_insertion_hook_holder = NULL;
3822 }
3823
3824 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3825 longer needed after ipa-cp. */
3826
3827 void
3828 ipa_free_all_structures_after_ipa_cp (void)
3829 {
3830 if (!optimize)
3831 {
3832 ipa_free_all_edge_args ();
3833 ipa_free_all_node_params ();
3834 free_alloc_pool (ipcp_sources_pool);
3835 free_alloc_pool (ipcp_values_pool);
3836 free_alloc_pool (ipcp_agg_lattice_pool);
3837 ipa_unregister_cgraph_hooks ();
3838 if (ipa_refdesc_pool)
3839 free_alloc_pool (ipa_refdesc_pool);
3840 }
3841 }
3842
3843 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3844 longer needed after indirect inlining. */
3845
3846 void
3847 ipa_free_all_structures_after_iinln (void)
3848 {
3849 ipa_free_all_edge_args ();
3850 ipa_free_all_node_params ();
3851 ipa_unregister_cgraph_hooks ();
3852 if (ipcp_sources_pool)
3853 free_alloc_pool (ipcp_sources_pool);
3854 if (ipcp_values_pool)
3855 free_alloc_pool (ipcp_values_pool);
3856 if (ipcp_agg_lattice_pool)
3857 free_alloc_pool (ipcp_agg_lattice_pool);
3858 if (ipa_refdesc_pool)
3859 free_alloc_pool (ipa_refdesc_pool);
3860 }
3861
3862 /* Print the ipa_tree_map data structure of function NODE to F.  */
3864
3865 void
3866 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3867 {
3868 int i, count;
3869 struct ipa_node_params *info;
3870
3871 if (!node->definition)
3872 return;
3873 info = IPA_NODE_REF (node);
3874 fprintf (f, " function %s/%i parameter descriptors:\n",
3875 node->name (), node->order);
3876 count = ipa_get_param_count (info);
3877 for (i = 0; i < count; i++)
3878 {
3879 int c;
3880
3881 fprintf (f, " ");
3882 ipa_dump_param (f, info, i);
3883 if (ipa_is_param_used (info, i))
3884 fprintf (f, " used");
3885 c = ipa_get_controlled_uses (info, i);
3886 if (c == IPA_UNDESCRIBED_USE)
3887 fprintf (f, " undescribed_use");
3888 else
3889 fprintf (f, " controlled_uses=%i", c);
3890 fprintf (f, "\n");
3891 }
3892 }
3893
3894 /* Print ipa_tree_map data structures of all functions in the
3895 callgraph to F. */
3896
3897 void
3898 ipa_print_all_params (FILE * f)
3899 {
3900 struct cgraph_node *node;
3901
3902 fprintf (f, "\nFunction parameters:\n");
3903 FOR_EACH_FUNCTION (node)
3904 ipa_print_node_params (f, node);
3905 }
3906
3907 /* Return a heap-allocated vector containing the formal parameters of FNDECL.  */
3908
3909 vec<tree>
3910 ipa_get_vector_of_formal_parms (tree fndecl)
3911 {
3912 vec<tree> args;
3913 int count;
3914 tree parm;
3915
3916 gcc_assert (!flag_wpa);
3917 count = count_formal_params (fndecl);
3918 args.create (count);
3919 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3920 args.quick_push (parm);
3921
3922 return args;
3923 }
3924
3925 /* Return a heap-allocated vector containing the types of formal parameters
3926    of function type FNTYPE.  */
3927
3928 vec<tree>
3929 ipa_get_vector_of_formal_parm_types (tree fntype)
3930 {
3931 vec<tree> types;
3932 int count = 0;
3933 tree t;
3934
3935 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3936 count++;
3937
3938 types.create (count);
3939 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3940 types.quick_push (TREE_VALUE (t));
3941
3942 return types;
3943 }
3944
3945 /* Modify the function declaration FNDECL and its type according to the plan in
3946 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3947 to reflect the actual parameters being modified which are determined by the
3948 base_index field. */
3949
3950 void
3951 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3952 {
3953 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3954 tree orig_type = TREE_TYPE (fndecl);
3955 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3956
3957   /* The following test is an ugly hack; some functions simply don't have any
3958 arguments in their type. This is probably a bug but well... */
3959 bool care_for_types = (old_arg_types != NULL_TREE);
3960 bool last_parm_void;
3961 vec<tree> otypes;
3962 if (care_for_types)
3963 {
3964 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3965 == void_type_node);
3966 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3967 if (last_parm_void)
3968 gcc_assert (oparms.length () + 1 == otypes.length ());
3969 else
3970 gcc_assert (oparms.length () == otypes.length ());
3971 }
3972 else
3973 {
3974 last_parm_void = false;
3975 otypes.create (0);
3976 }
3977
3978 int len = adjustments.length ();
3979 tree *link = &DECL_ARGUMENTS (fndecl);
3980 tree new_arg_types = NULL;
3981 for (int i = 0; i < len; i++)
3982 {
3983 struct ipa_parm_adjustment *adj;
3984 gcc_assert (link);
3985
3986 adj = &adjustments[i];
3987 tree parm;
3988 if (adj->op == IPA_PARM_OP_NEW)
3989 parm = NULL;
3990 else
3991 parm = oparms[adj->base_index];
3992 adj->base = parm;
3993
3994 if (adj->op == IPA_PARM_OP_COPY)
3995 {
3996 if (care_for_types)
3997 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3998 new_arg_types);
3999 *link = parm;
4000 link = &DECL_CHAIN (parm);
4001 }
4002 else if (adj->op != IPA_PARM_OP_REMOVE)
4003 {
4004 tree new_parm;
4005 tree ptype;
4006
4007 if (adj->by_ref)
4008 ptype = build_pointer_type (adj->type);
4009 else
4010 {
4011 ptype = adj->type;
4012 if (is_gimple_reg_type (ptype))
4013 {
4014 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4015 if (TYPE_ALIGN (ptype) < malign)
4016 ptype = build_aligned_type (ptype, malign);
4017 }
4018 }
4019
4020 if (care_for_types)
4021 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4022
4023 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4024 ptype);
4025 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4026 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4027 DECL_ARTIFICIAL (new_parm) = 1;
4028 DECL_ARG_TYPE (new_parm) = ptype;
4029 DECL_CONTEXT (new_parm) = fndecl;
4030 TREE_USED (new_parm) = 1;
4031 DECL_IGNORED_P (new_parm) = 1;
4032 layout_decl (new_parm, 0);
4033
4034 if (adj->op == IPA_PARM_OP_NEW)
4035 adj->base = NULL;
4036 else
4037 adj->base = parm;
4038 adj->new_decl = new_parm;
4039
4040 *link = new_parm;
4041 link = &DECL_CHAIN (new_parm);
4042 }
4043 }
4044
4045 *link = NULL_TREE;
4046
4047 tree new_reversed = NULL;
4048 if (care_for_types)
4049 {
4050 new_reversed = nreverse (new_arg_types);
4051 if (last_parm_void)
4052 {
4053 if (new_reversed)
4054 TREE_CHAIN (new_arg_types) = void_list_node;
4055 else
4056 new_reversed = void_list_node;
4057 }
4058 }
4059
4060   /* Use copy_node to preserve as much as possible from the original type
4061      (debug info, attribute lists etc.).
4062      The exception is that METHOD_TYPEs must have the THIS argument; when we
4063      are asked to remove it, we need to build a new FUNCTION_TYPE instead.  */
4065 tree new_type = NULL;
4066 if (TREE_CODE (orig_type) != METHOD_TYPE
4067 || (adjustments[0].op == IPA_PARM_OP_COPY
4068 && adjustments[0].base_index == 0))
4069 {
4070 new_type = build_distinct_type_copy (orig_type);
4071 TYPE_ARG_TYPES (new_type) = new_reversed;
4072 }
4073 else
4074 {
4075 new_type
4076 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4077 new_reversed));
4078 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4079 DECL_VINDEX (fndecl) = NULL_TREE;
4080 }
4081
4082 /* When signature changes, we need to clear builtin info. */
4083 if (DECL_BUILT_IN (fndecl))
4084 {
4085 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4086 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4087 }
4088
4089 TREE_TYPE (fndecl) = new_type;
4090 DECL_VIRTUAL_P (fndecl) = 0;
4091 DECL_LANG_SPECIFIC (fndecl) = NULL;
4092 otypes.release ();
4093 oparms.release ();
4094 }
4095
4096 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4097 If this is a directly recursive call, CS must be NULL. Otherwise it must
4098 contain the corresponding call graph edge. */
4099
4100 void
4101 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4102 ipa_parm_adjustment_vec adjustments)
4103 {
4104 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4105 vec<tree> vargs;
4106 vec<tree, va_gc> **debug_args = NULL;
4107 gimple new_stmt;
4108 gimple_stmt_iterator gsi, prev_gsi;
4109 tree callee_decl;
4110 int i, len;
4111
4112 len = adjustments.length ();
4113 vargs.create (len);
4114 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4115 current_node->remove_stmt_references (stmt);
4116
4117 gsi = gsi_for_stmt (stmt);
4118 prev_gsi = gsi;
4119 gsi_prev (&prev_gsi);
4120 for (i = 0; i < len; i++)
4121 {
4122 struct ipa_parm_adjustment *adj;
4123
4124 adj = &adjustments[i];
4125
4126 if (adj->op == IPA_PARM_OP_COPY)
4127 {
4128 tree arg = gimple_call_arg (stmt, adj->base_index);
4129
4130 vargs.quick_push (arg);
4131 }
4132 else if (adj->op != IPA_PARM_OP_REMOVE)
4133 {
4134 tree expr, base, off;
4135 location_t loc;
4136 unsigned int deref_align = 0;
4137 bool deref_base = false;
4138
4139 	  /* When we create a new parameter out of the value of the old one, we
4140 	     can do the following kinds of transformations:
4141
4142 - A scalar passed by reference is converted to a scalar passed by
4143 value. (adj->by_ref is false and the type of the original
4144 actual argument is a pointer to a scalar).
4145
4146 - A part of an aggregate is passed instead of the whole aggregate.
4147 The part can be passed either by value or by reference, this is
4148 determined by value of adj->by_ref. Moreover, the code below
4149 handles both situations when the original aggregate is passed by
4150 value (its type is not a pointer) and when it is passed by
4151 reference (it is a pointer to an aggregate).
4152
4153 When the new argument is passed by reference (adj->by_ref is true)
4154 it must be a part of an aggregate and therefore we form it by
4155 simply taking the address of a reference inside the original
4156 aggregate. */
4157
4158 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4159 base = gimple_call_arg (stmt, adj->base_index);
4160 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4161 : EXPR_LOCATION (base);
4162
4163 if (TREE_CODE (base) != ADDR_EXPR
4164 && POINTER_TYPE_P (TREE_TYPE (base)))
4165 off = build_int_cst (adj->alias_ptr_type,
4166 adj->offset / BITS_PER_UNIT);
4167 else
4168 {
4169 HOST_WIDE_INT base_offset;
4170 tree prev_base;
4171 bool addrof;
4172
4173 if (TREE_CODE (base) == ADDR_EXPR)
4174 {
4175 base = TREE_OPERAND (base, 0);
4176 addrof = true;
4177 }
4178 else
4179 addrof = false;
4180 prev_base = base;
4181 base = get_addr_base_and_unit_offset (base, &base_offset);
4182 /* Aggregate arguments can have non-invariant addresses. */
4183 if (!base)
4184 {
4185 base = build_fold_addr_expr (prev_base);
4186 off = build_int_cst (adj->alias_ptr_type,
4187 adj->offset / BITS_PER_UNIT);
4188 }
4189 else if (TREE_CODE (base) == MEM_REF)
4190 {
4191 if (!addrof)
4192 {
4193 deref_base = true;
4194 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4195 }
4196 off = build_int_cst (adj->alias_ptr_type,
4197 base_offset
4198 + adj->offset / BITS_PER_UNIT);
4199 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4200 off);
4201 base = TREE_OPERAND (base, 0);
4202 }
4203 else
4204 {
4205 off = build_int_cst (adj->alias_ptr_type,
4206 base_offset
4207 + adj->offset / BITS_PER_UNIT);
4208 base = build_fold_addr_expr (base);
4209 }
4210 }
4211
4212 if (!adj->by_ref)
4213 {
4214 tree type = adj->type;
4215 unsigned int align;
4216 unsigned HOST_WIDE_INT misalign;
4217
4218 if (deref_base)
4219 {
4220 align = deref_align;
4221 misalign = 0;
4222 }
4223 else
4224 {
4225 get_pointer_alignment_1 (base, &align, &misalign);
4226 if (TYPE_ALIGN (type) > align)
4227 align = TYPE_ALIGN (type);
4228 }
4229 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4230 * BITS_PER_UNIT);
4231 misalign = misalign & (align - 1);
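     /* The lowest bit set in the misalignment is the largest alignment the
        access is guaranteed to have.  */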
4232 if (misalign != 0)
4233 align = (misalign & -misalign);
4234 if (align < TYPE_ALIGN (type))
4235 type = build_aligned_type (type, align);
4236 base = force_gimple_operand_gsi (&gsi, base,
4237 true, NULL, true, GSI_SAME_STMT);
4238 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4239 /* If expr is not a valid gimple call argument emit
4240 a load into a temporary. */
4241 if (is_gimple_reg_type (TREE_TYPE (expr)))
4242 {
4243 gimple tem = gimple_build_assign (NULL_TREE, expr);
4244 if (gimple_in_ssa_p (cfun))
4245 {
4246 gimple_set_vuse (tem, gimple_vuse (stmt));
4247 expr = make_ssa_name (TREE_TYPE (expr), tem);
4248 }
4249 else
4250 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4251 gimple_assign_set_lhs (tem, expr);
4252 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4253 }
4254 }
4255 else
4256 {
4257 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4258 expr = build_fold_addr_expr (expr);
4259 expr = force_gimple_operand_gsi (&gsi, expr,
4260 true, NULL, true, GSI_SAME_STMT);
4261 }
4262 vargs.quick_push (expr);
4263 }
4264 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4265 {
4266 unsigned int ix;
4267 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4268 gimple def_temp;
4269
4270 arg = gimple_call_arg (stmt, adj->base_index);
4271 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4272 {
4273 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4274 continue;
4275 arg = fold_convert_loc (gimple_location (stmt),
4276 TREE_TYPE (origin), arg);
4277 }
4278 if (debug_args == NULL)
4279 debug_args = decl_debug_args_insert (callee_decl);
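     /* The debug args vector stores pairs of an origin declaration and the
        corresponding debug expression declaration, hence the stride of two
        and the IX + 1 access below.  */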
4280 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4281 if (ddecl == origin)
4282 {
4283 ddecl = (**debug_args)[ix + 1];
4284 break;
4285 }
4286 if (ddecl == NULL)
4287 {
4288 ddecl = make_node (DEBUG_EXPR_DECL);
4289 DECL_ARTIFICIAL (ddecl) = 1;
4290 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4291 DECL_MODE (ddecl) = DECL_MODE (origin);
4292
4293 vec_safe_push (*debug_args, origin);
4294 vec_safe_push (*debug_args, ddecl);
4295 }
4296 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4297 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4298 }
4299 }
4300
4301 if (dump_file && (dump_flags & TDF_DETAILS))
4302 {
4303 fprintf (dump_file, "replacing stmt:");
4304 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4305 }
4306
4307 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4308 vargs.release ();
4309 if (gimple_call_lhs (stmt))
4310 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4311
4312 gimple_set_block (new_stmt, gimple_block (stmt));
4313 if (gimple_has_location (stmt))
4314 gimple_set_location (new_stmt, gimple_location (stmt));
4315 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4316 gimple_call_copy_flags (new_stmt, stmt);
4317 if (gimple_in_ssa_p (cfun))
4318 {
4319 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4320 if (gimple_vdef (stmt))
4321 {
4322 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4323 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4324 }
4325 }
4326
4327 if (dump_file && (dump_flags & TDF_DETAILS))
4328 {
4329 fprintf (dump_file, "with stmt:");
4330 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4331 fprintf (dump_file, "\n");
4332 }
4333 gsi_replace (&gsi, new_stmt, true);
4334 if (cs)
4335 cs->set_call_stmt (new_stmt);
4336 do
4337 {
4338 current_node->record_stmt_references (gsi_stmt (gsi));
4339 gsi_prev (&gsi);
4340 }
4341 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4342 }
4343
4344 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4345 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4346    specifies whether the function should care about type incompatibility
4347    between the current and new expressions.  If it is false, the function will leave
4348 incompatibility issues to the caller. Return true iff the expression
4349 was modified. */
4350
4351 bool
4352 ipa_modify_expr (tree *expr, bool convert,
4353 ipa_parm_adjustment_vec adjustments)
4354 {
4355 struct ipa_parm_adjustment *cand
4356 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4357 if (!cand)
4358 return false;
4359
4360 tree src;
4361 if (cand->by_ref)
4362 src = build_simple_mem_ref (cand->new_decl);
4363 else
4364 src = cand->new_decl;
4365
4366 if (dump_file && (dump_flags & TDF_DETAILS))
4367 {
4368 fprintf (dump_file, "About to replace expr ");
4369 print_generic_expr (dump_file, *expr, 0);
4370 fprintf (dump_file, " with ");
4371 print_generic_expr (dump_file, src, 0);
4372 fprintf (dump_file, "\n");
4373 }
4374
4375 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4376 {
4377 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4378 *expr = vce;
4379 }
4380 else
4381 *expr = src;
4382 return true;
4383 }
4384
4385 /* If T is an SSA_NAME, return NULL if it is not a default def or
4386 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4387    the base variable is always returned, regardless of whether it is a default
4388 def. Return T if it is not an SSA_NAME. */
4389
4390 static tree
4391 get_ssa_base_param (tree t, bool ignore_default_def)
4392 {
4393 if (TREE_CODE (t) == SSA_NAME)
4394 {
4395 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4396 return SSA_NAME_VAR (t);
4397 else
4398 return NULL_TREE;
4399 }
4400 return t;
4401 }
4402
4403 /* Given an expression, return an adjustment entry specifying the
4404 transformation to be done on EXPR. If no suitable adjustment entry
4405 was found, returns NULL.
4406
4407 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4408 default def, otherwise bail on them.
4409
4410 If CONVERT is non-NULL, this function will set *CONVERT if the
4411 expression provided is a component reference. ADJUSTMENTS is the
4412 adjustments vector. */
4413
4414 ipa_parm_adjustment *
4415 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4416 ipa_parm_adjustment_vec adjustments,
4417 bool ignore_default_def)
4418 {
4419 if (TREE_CODE (**expr) == BIT_FIELD_REF
4420 || TREE_CODE (**expr) == IMAGPART_EXPR
4421 || TREE_CODE (**expr) == REALPART_EXPR)
4422 {
4423 *expr = &TREE_OPERAND (**expr, 0);
4424 if (convert)
4425 *convert = true;
4426 }
4427
4428 HOST_WIDE_INT offset, size, max_size;
4429 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4430 if (!base || size == -1 || max_size == -1)
4431 return NULL;
4432
4433 if (TREE_CODE (base) == MEM_REF)
4434 {
4435 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4436 base = TREE_OPERAND (base, 0);
4437 }
4438
4439 base = get_ssa_base_param (base, ignore_default_def);
4440 if (!base || TREE_CODE (base) != PARM_DECL)
4441 return NULL;
4442
4443 struct ipa_parm_adjustment *cand = NULL;
4444 unsigned int len = adjustments.length ();
4445 for (unsigned i = 0; i < len; i++)
4446 {
4447 struct ipa_parm_adjustment *adj = &adjustments[i];
4448
4449 if (adj->base == base
4450 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4451 {
4452 cand = adj;
4453 break;
4454 }
4455 }
4456
4457 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4458 return NULL;
4459 return cand;
4460 }
4461
4462 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4463
4464 static bool
4465 index_in_adjustments_multiple_times_p (int base_index,
4466 ipa_parm_adjustment_vec adjustments)
4467 {
4468 int i, len = adjustments.length ();
4469 bool one = false;
4470
4471 for (i = 0; i < len; i++)
4472 {
4473 struct ipa_parm_adjustment *adj;
4474 adj = &adjustments[i];
4475
4476 if (adj->base_index == base_index)
4477 {
4478 if (one)
4479 return true;
4480 else
4481 one = true;
4482 }
4483 }
4484 return false;
4485 }
4486
4487
4488 /* Return adjustments that should have the same effect on function parameters
4489 and call arguments as if they were first changed according to adjustments in
4490 INNER and then by adjustments in OUTER. */
4491
4492 ipa_parm_adjustment_vec
4493 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4494 ipa_parm_adjustment_vec outer)
4495 {
4496 int i, outlen = outer.length ();
4497 int inlen = inner.length ();
4498 int removals = 0;
4499 ipa_parm_adjustment_vec adjustments, tmp;
4500
4501 tmp.create (inlen);
4502 for (i = 0; i < inlen; i++)
4503 {
4504 struct ipa_parm_adjustment *n;
4505 n = &inner[i];
4506
4507 if (n->op == IPA_PARM_OP_REMOVE)
4508 removals++;
4509 else
4510 {
4511 	  /* FIXME: Handling of new arguments is not implemented yet.  */
4512 gcc_assert (n->op != IPA_PARM_OP_NEW);
4513 tmp.quick_push (*n);
4514 }
4515 }
4516
4517 adjustments.create (outlen + removals);
4518 for (i = 0; i < outlen; i++)
4519 {
4520 struct ipa_parm_adjustment r;
4521 struct ipa_parm_adjustment *out = &outer[i];
4522 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4523
4524 memset (&r, 0, sizeof (r));
4525 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4526 if (out->op == IPA_PARM_OP_REMOVE)
4527 {
4528 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4529 {
4530 r.op = IPA_PARM_OP_REMOVE;
4531 adjustments.quick_push (r);
4532 }
4533 continue;
4534 }
4535 else
4536 {
4537 	  /* FIXME: Handling of new arguments is not implemented yet.  */
4538 gcc_assert (out->op != IPA_PARM_OP_NEW);
4539 }
4540
4541 r.base_index = in->base_index;
4542 r.type = out->type;
4543
4544 /* FIXME: Create nonlocal value too. */
4545
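     /* A copy contributes no offset.  Otherwise the outer offset is relative
        to the parameter produced by the inner adjustment, so the two offsets
        accumulate.  */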
4546 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4547 r.op = IPA_PARM_OP_COPY;
4548 else if (in->op == IPA_PARM_OP_COPY)
4549 r.offset = out->offset;
4550 else if (out->op == IPA_PARM_OP_COPY)
4551 r.offset = in->offset;
4552 else
4553 r.offset = in->offset + out->offset;
4554 adjustments.quick_push (r);
4555 }
4556
4557 for (i = 0; i < inlen; i++)
4558 {
4559 struct ipa_parm_adjustment *n = &inner[i];
4560
4561 if (n->op == IPA_PARM_OP_REMOVE)
4562 adjustments.quick_push (*n);
4563 }
4564
4565 tmp.release ();
4566 return adjustments;
4567 }
4568
4569 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4570    human-friendly way, assuming they are meant to be applied to FNDECL.  */
4571
4572 void
4573 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4574 tree fndecl)
4575 {
4576 int i, len = adjustments.length ();
4577 bool first = true;
4578 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4579
4580 fprintf (file, "IPA param adjustments: ");
4581 for (i = 0; i < len; i++)
4582 {
4583 struct ipa_parm_adjustment *adj;
4584 adj = &adjustments[i];
4585
4586 if (!first)
4587 fprintf (file, " ");
4588 else
4589 first = false;
4590
4591 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4592 print_generic_expr (file, parms[adj->base_index], 0);
4593 if (adj->base)
4594 {
4595 fprintf (file, ", base: ");
4596 print_generic_expr (file, adj->base, 0);
4597 }
4598 if (adj->new_decl)
4599 {
4600 fprintf (file, ", new_decl: ");
4601 print_generic_expr (file, adj->new_decl, 0);
4602 }
4603 if (adj->new_ssa_base)
4604 {
4605 fprintf (file, ", new_ssa_base: ");
4606 print_generic_expr (file, adj->new_ssa_base, 0);
4607 }
4608
4609 if (adj->op == IPA_PARM_OP_COPY)
4610 fprintf (file, ", copy_param");
4611 else if (adj->op == IPA_PARM_OP_REMOVE)
4612 fprintf (file, ", remove_param");
4613 else
4614 fprintf (file, ", offset %li", (long) adj->offset);
4615 if (adj->by_ref)
4616 fprintf (file, ", by_ref");
4617 print_node_brief (file, ", type: ", adj->type, 0);
4618 fprintf (file, "\n");
4619 }
4620 parms.release ();
4621 }
4622
4623 /* Dump the AV linked list. */
4624
4625 void
4626 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4627 {
4628 bool comma = false;
4629 fprintf (f, " Aggregate replacements:");
4630 for (; av; av = av->next)
4631 {
4632 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4633 av->index, av->offset);
4634 print_generic_expr (f, av->value, 0);
4635 comma = true;
4636 }
4637 fprintf (f, "\n");
4638 }
4639
4640 /* Stream out jump function JUMP_FUNC to OB. */
4641
4642 static void
4643 ipa_write_jump_function (struct output_block *ob,
4644 struct ipa_jump_func *jump_func)
4645 {
4646 struct ipa_agg_jf_item *item;
4647 struct bitpack_d bp;
4648 int i, count;
4649
4650 streamer_write_uhwi (ob, jump_func->type);
4651 switch (jump_func->type)
4652 {
4653 case IPA_JF_UNKNOWN:
4654 break;
4655 case IPA_JF_KNOWN_TYPE:
4656 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4657 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4658 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4659 break;
4660 case IPA_JF_CONST:
4661 gcc_assert (
4662 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4663 stream_write_tree (ob, jump_func->value.constant.value, true);
4664 break;
4665 case IPA_JF_PASS_THROUGH:
4666 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4667 if (jump_func->value.pass_through.operation == NOP_EXPR)
4668 {
4669 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4670 bp = bitpack_create (ob->main_stream);
4671 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4672 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4673 streamer_write_bitpack (&bp);
4674 }
4675 else
4676 {
4677 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4678 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4679 }
4680 break;
4681 case IPA_JF_ANCESTOR:
4682 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4683 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4684 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4685 bp = bitpack_create (ob->main_stream);
4686 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4687 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4688 streamer_write_bitpack (&bp);
4689 break;
4690 }
4691
4692 count = vec_safe_length (jump_func->agg.items);
4693 streamer_write_uhwi (ob, count);
4694 if (count)
4695 {
4696 bp = bitpack_create (ob->main_stream);
4697 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4698 streamer_write_bitpack (&bp);
4699 }
4700
4701 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4702 {
4703 streamer_write_uhwi (ob, item->offset);
4704 stream_write_tree (ob, item->value, true);
4705 }
4706 }
4707
4708 /* Read in jump function JUMP_FUNC from IB. */
4709
4710 static void
4711 ipa_read_jump_function (struct lto_input_block *ib,
4712 struct ipa_jump_func *jump_func,
4713 struct cgraph_edge *cs,
4714 struct data_in *data_in)
4715 {
4716 enum jump_func_type jftype;
4717 enum tree_code operation;
4718 int i, count;
4719
4720 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4721 switch (jftype)
4722 {
4723 case IPA_JF_UNKNOWN:
4724 jump_func->type = IPA_JF_UNKNOWN;
4725 break;
4726 case IPA_JF_KNOWN_TYPE:
4727 {
4728 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4729 tree base_type = stream_read_tree (ib, data_in);
4730 tree component_type = stream_read_tree (ib, data_in);
4731
4732 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4733 break;
4734 }
4735 case IPA_JF_CONST:
4736 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4737 break;
4738 case IPA_JF_PASS_THROUGH:
4739 operation = (enum tree_code) streamer_read_uhwi (ib);
4740 if (operation == NOP_EXPR)
4741 {
4742 int formal_id = streamer_read_uhwi (ib);
4743 struct bitpack_d bp = streamer_read_bitpack (ib);
4744 bool agg_preserved = bp_unpack_value (&bp, 1);
4745 bool type_preserved = bp_unpack_value (&bp, 1);
4746 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4747 type_preserved);
4748 }
4749 else
4750 {
4751 tree operand = stream_read_tree (ib, data_in);
4752 int formal_id = streamer_read_uhwi (ib);
4753 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4754 operation);
4755 }
4756 break;
4757 case IPA_JF_ANCESTOR:
4758 {
4759 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4760 tree type = stream_read_tree (ib, data_in);
4761 int formal_id = streamer_read_uhwi (ib);
4762 struct bitpack_d bp = streamer_read_bitpack (ib);
4763 bool agg_preserved = bp_unpack_value (&bp, 1);
4764 bool type_preserved = bp_unpack_value (&bp, 1);
4765
4766 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4767 type_preserved);
4768 break;
4769 }
4770 }
4771
4772 count = streamer_read_uhwi (ib);
4773 vec_alloc (jump_func->agg.items, count);
4774 if (count)
4775 {
4776 struct bitpack_d bp = streamer_read_bitpack (ib);
4777 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4778 }
4779 for (i = 0; i < count; i++)
4780 {
4781 struct ipa_agg_jf_item item;
4782 item.offset = streamer_read_uhwi (ib);
4783 item.value = stream_read_tree (ib, data_in);
4784 jump_func->agg.items->quick_push (item);
4785 }
4786 }
4787
4788 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4789 relevant to indirect inlining to OB. */
4790
4791 static void
4792 ipa_write_indirect_edge_info (struct output_block *ob,
4793 struct cgraph_edge *cs)
4794 {
4795 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4796 struct bitpack_d bp;
4797
4798 streamer_write_hwi (ob, ii->param_index);
4799 bp = bitpack_create (ob->main_stream);
4800 bp_pack_value (&bp, ii->polymorphic, 1);
4801 bp_pack_value (&bp, ii->agg_contents, 1);
4802 bp_pack_value (&bp, ii->member_ptr, 1);
4803 bp_pack_value (&bp, ii->by_ref, 1);
4804 streamer_write_bitpack (&bp);
4805 if (ii->agg_contents || ii->polymorphic)
4806 streamer_write_hwi (ob, ii->offset);
4807 else
4808 gcc_assert (ii->offset == 0);
4809
4810 if (ii->polymorphic)
4811 {
4812 streamer_write_hwi (ob, ii->otr_token);
4813 stream_write_tree (ob, ii->otr_type, true);
4814 ii->context.stream_out (ob);
4815 }
4816 }
4817
4818 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4819 relevant to indirect inlining from IB. */
4820
4821 static void
4822 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4823 struct data_in *data_in,
4824 struct cgraph_edge *cs)
4825 {
4826 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4827 struct bitpack_d bp;
4828
4829 ii->param_index = (int) streamer_read_hwi (ib);
4830 bp = streamer_read_bitpack (ib);
4831 ii->polymorphic = bp_unpack_value (&bp, 1);
4832 ii->agg_contents = bp_unpack_value (&bp, 1);
4833 ii->member_ptr = bp_unpack_value (&bp, 1);
4834 ii->by_ref = bp_unpack_value (&bp, 1);
4835 if (ii->agg_contents || ii->polymorphic)
4836 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4837 else
4838 ii->offset = 0;
4839 if (ii->polymorphic)
4840 {
4841 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4842 ii->otr_type = stream_read_tree (ib, data_in);
4843 ii->context.stream_in (ib, data_in);
4844 }
4845 }
4846
4847 /* Stream out NODE info to OB. */
4848
4849 static void
4850 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4851 {
4852 int node_ref;
4853 lto_symtab_encoder_t encoder;
4854 struct ipa_node_params *info = IPA_NODE_REF (node);
4855 int j;
4856 struct cgraph_edge *e;
4857 struct bitpack_d bp;
4858
4859 encoder = ob->decl_state->symtab_node_encoder;
4860 node_ref = lto_symtab_encoder_encode (encoder, node);
4861 streamer_write_uhwi (ob, node_ref);
4862
4863 streamer_write_uhwi (ob, ipa_get_param_count (info));
4864 for (j = 0; j < ipa_get_param_count (info); j++)
4865 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4866 bp = bitpack_create (ob->main_stream);
4867 gcc_assert (info->analysis_done
4868 || ipa_get_param_count (info) == 0);
4869 gcc_assert (!info->node_enqueued);
4870 gcc_assert (!info->ipcp_orig_node);
4871 for (j = 0; j < ipa_get_param_count (info); j++)
4872 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4873 streamer_write_bitpack (&bp);
4874 for (j = 0; j < ipa_get_param_count (info); j++)
4875 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4876 for (e = node->callees; e; e = e->next_callee)
4877 {
4878 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4879
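     /* The argument count is streamed multiplied by two, with the lowest bit
        recording whether polymorphic call contexts accompany the jump
        functions.  */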
4880 streamer_write_uhwi (ob,
4881 ipa_get_cs_argument_count (args) * 2
4882 + (args->polymorphic_call_contexts != NULL));
4883 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4884 {
4885 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4886 if (args->polymorphic_call_contexts != NULL)
4887 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4888 }
4889 }
4890 for (e = node->indirect_calls; e; e = e->next_callee)
4891 {
4892 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4893
4894 streamer_write_uhwi (ob,
4895 ipa_get_cs_argument_count (args) * 2
4896 + (args->polymorphic_call_contexts != NULL));
4897 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4898 {
4899 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4900 if (args->polymorphic_call_contexts != NULL)
4901 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4902 }
4903 ipa_write_indirect_edge_info (ob, e);
4904 }
4905 }
4906
4907 /* Stream in NODE info from IB. */
4908
4909 static void
4910 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4911 struct data_in *data_in)
4912 {
4913 struct ipa_node_params *info = IPA_NODE_REF (node);
4914 int k;
4915 struct cgraph_edge *e;
4916 struct bitpack_d bp;
4917
4918 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4919
4920 for (k = 0; k < ipa_get_param_count (info); k++)
4921 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4922
4923 bp = streamer_read_bitpack (ib);
4924 if (ipa_get_param_count (info) != 0)
4925 info->analysis_done = true;
4926 info->node_enqueued = false;
4927 for (k = 0; k < ipa_get_param_count (info); k++)
4928 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4929 for (k = 0; k < ipa_get_param_count (info); k++)
4930 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4931 for (e = node->callees; e; e = e->next_callee)
4932 {
4933 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4934 int count = streamer_read_uhwi (ib);
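     /* The streamed value is the argument count multiplied by two, with the
        lowest bit flagging the presence of polymorphic call contexts.  */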
4935 bool contexts_computed = count & 1;
4936 count /= 2;
4937
4938 if (!count)
4939 continue;
4940 vec_safe_grow_cleared (args->jump_functions, count);
4941 if (contexts_computed)
4942 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4943
4944 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4945 {
4946 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4947 data_in);
4948 if (contexts_computed)
4949 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4950 }
4951 }
4952 for (e = node->indirect_calls; e; e = e->next_callee)
4953 {
4954 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4955 int count = streamer_read_uhwi (ib);
4956 bool contexts_computed = count & 1;
4957 count /= 2;
4958
4959 if (count)
4960 {
4961 vec_safe_grow_cleared (args->jump_functions, count);
4962 if (contexts_computed)
4963 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4964 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4965 {
4966 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4967 data_in);
4968 if (contexts_computed)
4969 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4970 }
4971 }
4972 ipa_read_indirect_edge_info (ib, data_in, e);
4973 }
4974 }
4975
4976 /* Write jump functions for all functions in the current partition.  */
4977
4978 void
4979 ipa_prop_write_jump_functions (void)
4980 {
4981 struct cgraph_node *node;
4982 struct output_block *ob;
4983 unsigned int count = 0;
4984 lto_symtab_encoder_iterator lsei;
4985 lto_symtab_encoder_t encoder;
4986
4987
4988 if (!ipa_node_params_vector.exists ())
4989 return;
4990
4991 ob = create_output_block (LTO_section_jump_functions);
4992 encoder = ob->decl_state->symtab_node_encoder;
4993 ob->symbol = NULL;
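     /* Count the nodes to be streamed first; the count has to precede the
        per-node records in the section.  */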
4994 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4995 lsei_next_function_in_partition (&lsei))
4996 {
4997 node = lsei_cgraph_node (lsei);
4998 if (node->has_gimple_body_p ()
4999 && IPA_NODE_REF (node) != NULL)
5000 count++;
5001 }
5002
5003 streamer_write_uhwi (ob, count);
5004
5005 /* Process all of the functions. */
5006 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5007 lsei_next_function_in_partition (&lsei))
5008 {
5009 node = lsei_cgraph_node (lsei);
5010 if (node->has_gimple_body_p ()
5011 && IPA_NODE_REF (node) != NULL)
5012 ipa_write_node_info (ob, node);
5013 }
5014 streamer_write_char_stream (ob->main_stream, 0);
5015 produce_asm (ob, NULL);
5016 destroy_output_block (ob);
5017 }
5018
5019 /* Read section in file FILE_DATA of length LEN with data DATA. */
5020
5021 static void
5022 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5023 size_t len)
5024 {
5025 const struct lto_function_header *header =
5026 (const struct lto_function_header *) data;
5027 const int cfg_offset = sizeof (struct lto_function_header);
5028 const int main_offset = cfg_offset + header->cfg_size;
5029 const int string_offset = main_offset + header->main_size;
5030 struct data_in *data_in;
5031 unsigned int i;
5032 unsigned int count;
5033
5034 lto_input_block ib_main ((const char *) data + main_offset,
5035 header->main_size);
5036
5037 data_in =
5038 lto_data_in_create (file_data, (const char *) data + string_offset,
5039 header->string_size, vNULL);
5040 count = streamer_read_uhwi (&ib_main);
5041
5042 for (i = 0; i < count; i++)
5043 {
5044 unsigned int index;
5045 struct cgraph_node *node;
5046 lto_symtab_encoder_t encoder;
5047
5048 index = streamer_read_uhwi (&ib_main);
5049 encoder = file_data->symtab_node_encoder;
5050 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5051 index));
5052 gcc_assert (node->definition);
5053 ipa_read_node_info (&ib_main, node, data_in);
5054 }
5055 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5056 len);
5057 lto_data_in_delete (data_in);
5058 }
5059
5060 /* Read ipcp jump functions. */
5061
5062 void
5063 ipa_prop_read_jump_functions (void)
5064 {
5065 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5066 struct lto_file_decl_data *file_data;
5067 unsigned int j = 0;
5068
5069 ipa_check_create_node_params ();
5070 ipa_check_create_edge_args ();
5071 ipa_register_cgraph_hooks ();
5072
5073 while ((file_data = file_data_vec[j++]))
5074 {
5075 size_t len;
5076 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5077
5078 if (data)
5079 ipa_prop_read_section (file_data, data, len);
5080 }
5081 }
5082
5083 /* After merging units, we can get a mismatch in argument counts.
5084    Also, decl merging might have rendered parameter lists obsolete.
5085    Re-create the node and edge info structures if necessary.  */
5086
5087 void
5088 ipa_update_after_lto_read (void)
5089 {
5090 ipa_check_create_node_params ();
5091 ipa_check_create_edge_args ();
5092 }
5093
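     /* Stream out the aggregate value replacement chain for NODE to OB.  */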
5094 void
5095 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
5096 {
5097 int node_ref;
5098 unsigned int count = 0;
5099 lto_symtab_encoder_t encoder;
5100 struct ipa_agg_replacement_value *aggvals, *av;
5101
5102 aggvals = ipa_get_agg_replacements_for_node (node);
5103 encoder = ob->decl_state->symtab_node_encoder;
5104 node_ref = lto_symtab_encoder_encode (encoder, node);
5105 streamer_write_uhwi (ob, node_ref);
5106
5107 for (av = aggvals; av; av = av->next)
5108 count++;
5109 streamer_write_uhwi (ob, count);
5110
5111 for (av = aggvals; av; av = av->next)
5112 {
5113 struct bitpack_d bp;
5114
5115 streamer_write_uhwi (ob, av->offset);
5116 streamer_write_uhwi (ob, av->index);
5117 stream_write_tree (ob, av->value, true);
5118
5119 bp = bitpack_create (ob->main_stream);
5120 bp_pack_value (&bp, av->by_ref, 1);
5121 streamer_write_bitpack (&bp);
5122 }
5123 }
5124
5125 /* Stream in the aggregate value replacement chain for NODE from IB. */
5126
5127 static void
5128 read_agg_replacement_chain (struct lto_input_block *ib,
5129 struct cgraph_node *node,
5130 struct data_in *data_in)
5131 {
5132 struct ipa_agg_replacement_value *aggvals = NULL;
5133 unsigned int count, i;
5134
5135 count = streamer_read_uhwi (ib);
5136   for (i = 0; i < count; i++)
5137 {
5138 struct ipa_agg_replacement_value *av;
5139 struct bitpack_d bp;
5140
5141 av = ggc_alloc<ipa_agg_replacement_value> ();
5142 av->offset = streamer_read_uhwi (ib);
5143 av->index = streamer_read_uhwi (ib);
5144 av->value = stream_read_tree (ib, data_in);
5145 bp = streamer_read_bitpack (ib);
5146 av->by_ref = bp_unpack_value (&bp, 1);
5147 av->next = aggvals;
5148 aggvals = av;
5149 }
5150 ipa_set_node_agg_value_chain (node, aggvals);
5151 }
5152
5153 /* Write all aggregate replacements for nodes in the current partition.  */
5154
5155 void
5156 ipa_prop_write_all_agg_replacement (void)
5157 {
5158 struct cgraph_node *node;
5159 struct output_block *ob;
5160 unsigned int count = 0;
5161 lto_symtab_encoder_iterator lsei;
5162 lto_symtab_encoder_t encoder;
5163
5164 if (!ipa_node_agg_replacements)
5165 return;
5166
5167 ob = create_output_block (LTO_section_ipcp_transform);
5168 encoder = ob->decl_state->symtab_node_encoder;
5169 ob->symbol = NULL;
5170 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5171 lsei_next_function_in_partition (&lsei))
5172 {
5173 node = lsei_cgraph_node (lsei);
5174 if (node->has_gimple_body_p ()
5175 && ipa_get_agg_replacements_for_node (node) != NULL)
5176 count++;
5177 }
5178
5179 streamer_write_uhwi (ob, count);
5180
5181 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5182 lsei_next_function_in_partition (&lsei))
5183 {
5184 node = lsei_cgraph_node (lsei);
5185 if (node->has_gimple_body_p ()
5186 && ipa_get_agg_replacements_for_node (node) != NULL)
5187 write_agg_replacement_chain (ob, node);
5188 }
5189 streamer_write_char_stream (ob->main_stream, 0);
5190 produce_asm (ob, NULL);
5191 destroy_output_block (ob);
5192 }
5193
5194 /* Read replacements section in file FILE_DATA of length LEN with data
5195 DATA. */
5196
5197 static void
5198 read_replacements_section (struct lto_file_decl_data *file_data,
5199 const char *data,
5200 size_t len)
5201 {
5202 const struct lto_function_header *header =
5203 (const struct lto_function_header *) data;
5204 const int cfg_offset = sizeof (struct lto_function_header);
5205 const int main_offset = cfg_offset + header->cfg_size;
5206 const int string_offset = main_offset + header->main_size;
5207 struct data_in *data_in;
5208 unsigned int i;
5209 unsigned int count;
5210
5211 lto_input_block ib_main ((const char *) data + main_offset,
5212 header->main_size);
5213
5214 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5215 header->string_size, vNULL);
5216 count = streamer_read_uhwi (&ib_main);
5217
5218 for (i = 0; i < count; i++)
5219 {
5220 unsigned int index;
5221 struct cgraph_node *node;
5222 lto_symtab_encoder_t encoder;
5223
5224 index = streamer_read_uhwi (&ib_main);
5225 encoder = file_data->symtab_node_encoder;
5226 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5227 index));
5228 gcc_assert (node->definition);
5229 read_agg_replacement_chain (&ib_main, node, data_in);
5230 }
5231   lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5232 len);
5233 lto_data_in_delete (data_in);
5234 }
5235
5236 /* Read IPA-CP aggregate replacements. */
5237
5238 void
5239 ipa_prop_read_all_agg_replacement (void)
5240 {
5241 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5242 struct lto_file_decl_data *file_data;
5243 unsigned int j = 0;
5244
5245 while ((file_data = file_data_vec[j++]))
5246 {
5247 size_t len;
5248 const char *data = lto_get_section_data (file_data,
5249 LTO_section_ipcp_transform,
5250 NULL, &len);
5251 if (data)
5252 read_replacements_section (file_data, data, len);
5253 }
5254 }
5255
5256 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5257 NODE. */
5258
5259 static void
5260 adjust_agg_replacement_values (struct cgraph_node *node,
5261 struct ipa_agg_replacement_value *aggval)
5262 {
5263 struct ipa_agg_replacement_value *v;
5264 int i, c = 0, d = 0, *adj;
5265
5266 if (!node->clone.combined_args_to_skip)
5267 return;
5268
5269 for (v = aggval; v; v = v->next)
5270 {
5271 gcc_assert (v->index >= 0);
5272 if (c < v->index)
5273 c = v->index;
5274 }
5275 c++;
5276
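     /* Build a map from pre-skip parameter indices to post-skip ones;
        skipped parameters map to -1.  */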
5277 adj = XALLOCAVEC (int, c);
5278 for (i = 0; i < c; i++)
5279 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5280 {
5281 adj[i] = -1;
5282 d++;
5283 }
5284 else
5285 adj[i] = i - d;
5286
5287 for (v = aggval; v; v = v->next)
5288 v->index = adj[v->index];
5289 }
5290
5291 /* Dominator walker driving the ipcp modification phase. */
5292
5293 class ipcp_modif_dom_walker : public dom_walker
5294 {
5295 public:
5296 ipcp_modif_dom_walker (struct func_body_info *fbi,
5297 vec<ipa_param_descriptor> descs,
5298 struct ipa_agg_replacement_value *av,
5299 bool *sc, bool *cc)
5300 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5301 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5302
5303 virtual void before_dom_children (basic_block);
5304
5305 private:
5306 struct func_body_info *m_fbi;
5307 vec<ipa_param_descriptor> m_descriptors;
5308 struct ipa_agg_replacement_value *m_aggval;
5309 bool *m_something_changed, *m_cfg_changed;
5310 };
5311
5312 void
5313 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5314 {
5315 gimple_stmt_iterator gsi;
5316 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5317 {
5318 struct ipa_agg_replacement_value *v;
5319 gimple stmt = gsi_stmt (gsi);
5320 tree rhs, val, t;
5321 HOST_WIDE_INT offset, size;
5322 int index;
5323 bool by_ref, vce;
5324
5325 if (!gimple_assign_load_p (stmt))
5326 continue;
5327 rhs = gimple_assign_rhs1 (stmt);
5328 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5329 continue;
5330
5331 vce = false;
5332 t = rhs;
5333 while (handled_component_p (t))
5334 {
5335 /* V_C_E can do things like convert an array of integers to one
5336 bigger integer and similar things we do not handle below. */
5337 	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5338 {
5339 vce = true;
5340 break;
5341 }
5342 t = TREE_OPERAND (t, 0);
5343 }
5344 if (vce)
5345 continue;
5346
5347 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5348 &offset, &size, &by_ref))
5349 continue;
5350 for (v = m_aggval; v; v = v->next)
5351 if (v->index == index
5352 && v->offset == offset)
5353 break;
5354 if (!v
5355 || v->by_ref != by_ref
5356 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5357 continue;
5358
5359 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5360 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5361 {
5362 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5363 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5364 else if (TYPE_SIZE (TREE_TYPE (rhs))
5365 == TYPE_SIZE (TREE_TYPE (v->value)))
5366 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5367 else
5368 {
5369 if (dump_file)
5370 {
5371 fprintf (dump_file, " const ");
5372 print_generic_expr (dump_file, v->value, 0);
5373 fprintf (dump_file, " can't be converted to type of ");
5374 print_generic_expr (dump_file, rhs, 0);
5375 fprintf (dump_file, "\n");
5376 }
5377 continue;
5378 }
5379 }
5380 else
5381 val = v->value;
5382
5383 if (dump_file && (dump_flags & TDF_DETAILS))
5384 {
5385 fprintf (dump_file, "Modifying stmt:\n ");
5386 print_gimple_stmt (dump_file, stmt, 0, 0);
5387 }
5388 gimple_assign_set_rhs_from_tree (&gsi, val);
5389 update_stmt (stmt);
5390
5391 if (dump_file && (dump_flags & TDF_DETAILS))
5392 {
5393 fprintf (dump_file, "into:\n ");
5394 print_gimple_stmt (dump_file, stmt, 0, 0);
5395 fprintf (dump_file, "\n");
5396 }
5397
5398 *m_something_changed = true;
5399 if (maybe_clean_eh_stmt (stmt)
5400 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5401 *m_cfg_changed = true;
5402 }
5403
5404 }
5405
5406 /* IPCP transformation phase doing propagation of aggregate values. */
5407
5408 unsigned int
5409 ipcp_transform_function (struct cgraph_node *node)
5410 {
5411 vec<ipa_param_descriptor> descriptors = vNULL;
5412 struct func_body_info fbi;
5413 struct ipa_agg_replacement_value *aggval;
5414 int param_count;
5415 bool cfg_changed = false, something_changed = false;
5416
5417 gcc_checking_assert (cfun);
5418 gcc_checking_assert (current_function_decl);
5419
5420 if (dump_file)
5421 fprintf (dump_file, "Modification phase of node %s/%i\n",
5422 node->name (), node->order);
5423
5424 aggval = ipa_get_agg_replacements_for_node (node);
5425 if (!aggval)
5426 return 0;
5427 param_count = count_formal_params (node->decl);
5428 if (param_count == 0)
5429 return 0;
5430 adjust_agg_replacement_values (node, aggval);
5431 if (dump_file)
5432 ipa_dump_agg_replacement_values (dump_file, aggval);
5433
5434 fbi.node = node;
5435 fbi.info = NULL;
5436 fbi.bb_infos = vNULL;
5437 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5438 fbi.param_count = param_count;
5439 fbi.aa_walked = 0;
5440
5441 descriptors.safe_grow_cleared (param_count);
5442 ipa_populate_param_decls (node, descriptors);
5443 calculate_dominance_info (CDI_DOMINATORS);
5444 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5445 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5446
5447 int i;
5448 struct ipa_bb_info *bi;
5449 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5450 free_ipa_bb_info (bi);
5451 fbi.bb_infos.release ();
5452 free_dominance_info (CDI_DOMINATORS);
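     /* The replacements have now been applied; clear the entry so that they
        are not applied a second time.  */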
5453 (*ipa_node_agg_replacements)[node->uid] = NULL;
5454 descriptors.release ();
5455
5456 if (!something_changed)
5457 return 0;
5458 else if (cfg_changed)
5459 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5460 else
5461 return TODO_update_ssa_only_virtuals;
5462 }