ipa-polymorphic-call.c (possible_placement_new): Fix condition on size.
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60 #include "stringpool.h"
61 #include "tree-ssanames.h"
62 #include "dbgcnt.h"
63 #include "domwalk.h"
64 #include "builtins.h"
65 #include "calls.h"
66
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle again.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
85
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
96
/* Structure with global information that is only used when looking at a
   function body.  Gathered during summary construction, discarded
   afterwards.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info (the ipa_node_params summary of NODE).  */
  struct ipa_node_params *info;

  /* Information about individual BBs, indexed by basic block index.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by alias analysis when analyzing this
     function; used to cap the total amount of AA work (see aa_overwalked).  */
  unsigned int aa_walked;
};
117
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
147
148 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
149 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
150
151 static bool
152 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
153 {
154 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
155 struct cl_optimization *os;
156
157 if (!fs_opts)
158 return false;
159 os = TREE_OPTIMIZATION (fs_opts);
160 return !os->x_optimize || !os->x_flag_ipa_cp;
161 }
162
163 /* Return index of the formal whose tree is PTREE in function which corresponds
164 to INFO. */
165
166 static int
167 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
168 {
169 int i, count;
170
171 count = descriptors.length ();
172 for (i = 0; i < count; i++)
173 if (descriptors[i].decl == ptree)
174 return i;
175
176 return -1;
177 }
178
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  Returns -1 when PTREE is not a formal parameter of that
   function.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
187
188 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
189 NODE. */
190
191 static void
192 ipa_populate_param_decls (struct cgraph_node *node,
193 vec<ipa_param_descriptor> &descriptors)
194 {
195 tree fndecl;
196 tree fnargs;
197 tree parm;
198 int param_num;
199
200 fndecl = node->decl;
201 gcc_assert (gimple_has_body_p (fndecl));
202 fnargs = DECL_ARGUMENTS (fndecl);
203 param_num = 0;
204 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
205 {
206 descriptors[param_num].decl = parm;
207 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
208 true);
209 param_num++;
210 }
211 }
212
213 /* Return how many formal parameters FNDECL has. */
214
215 int
216 count_formal_params (tree fndecl)
217 {
218 tree parm;
219 int count = 0;
220 gcc_assert (gimple_has_body_p (fndecl));
221
222 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
223 count++;
224
225 return count;
226 }
227
/* Dump a textual representation of the Ith formal parameter of the function
   corresponding to INFO to FILE: the parameter index and, when the
   declaration is recorded in the descriptors, the declaration itself.
   (The original comment here described a different function.)  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
242
243 /* Initialize the ipa_node_params structure associated with NODE
244 to hold PARAM_COUNT parameters. */
245
246 void
247 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
248 {
249 struct ipa_node_params *info = IPA_NODE_REF (node);
250
251 if (!info->descriptors.exists () && param_count)
252 info->descriptors.safe_grow_cleared (param_count);
253 }
254
255 /* Initialize the ipa_node_params structure associated with NODE by counting
256 the function parameters, creating the descriptors and populating their
257 param_decls. */
258
259 void
260 ipa_initialize_node_params (struct cgraph_node *node)
261 {
262 struct ipa_node_params *info = IPA_NODE_REF (node);
263
264 if (!info->descriptors.exists ())
265 {
266 ipa_alloc_node_params (node, count_formal_params (node->decl));
267 ipa_populate_param_decls (node, info->descriptors);
268 }
269 }
270
/* Print the jump functions associated with call graph edge CS to file F.
   For each actual argument of the call, prints the jump function type and its
   payload, any aggregate jump function items, and the polymorphic call
   context when the edge carries one.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  /* Known dynamic type: containing type, offset of the component
	     within it and the component type itself.  */
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  /* For the address of a constant, also dump the initializer it
	     points to.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  /* Arithmetic pass-throughs carry an extra operand.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      /* The aggregate part of the jump function is independent of its type
	 and is dumped for all of them.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      /* A type in place of a value denotes a clobber of that many
		 bits.  */
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
      if (IPA_EDGE_REF (cs)->polymorphic_call_contexts)
	ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i)->dump (f);
    }
}
371
372
373 /* Print the jump functions of all arguments on all call graph edges going from
374 NODE to file F. */
375
376 void
377 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
378 {
379 struct cgraph_edge *cs;
380
381 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
382 node->order);
383 for (cs = node->callees; cs; cs = cs->next_callee)
384 {
385 if (!ipa_edge_args_info_available_for_edge_p (cs))
386 continue;
387
388 fprintf (f, " callsite %s/%i -> %s/%i : \n",
389 xstrdup (node->name ()), node->order,
390 xstrdup (cs->callee->name ()),
391 cs->callee->order);
392 ipa_print_node_jump_functions_for_edge (f, cs);
393 }
394
395 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
396 {
397 struct cgraph_indirect_call_info *ii;
398 if (!ipa_edge_args_info_available_for_edge_p (cs))
399 continue;
400
401 ii = cs->indirect_info;
402 if (ii->agg_contents)
403 fprintf (f, " indirect %s callsite, calling param %i, "
404 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
405 ii->member_ptr ? "member ptr" : "aggregate",
406 ii->param_index, ii->offset,
407 ii->by_ref ? "by reference" : "by_value");
408 else
409 fprintf (f, " indirect %s callsite, calling param %i, "
410 "offset " HOST_WIDE_INT_PRINT_DEC,
411 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
412 ii->offset);
413
414 if (cs->call_stmt)
415 {
416 fprintf (f, ", for stmt ");
417 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
418 }
419 else
420 fprintf (f, "\n");
421 if (ii->polymorphic)
422 ii->context.dump (f);
423 ipa_print_node_jump_functions_for_edge (f, cs);
424 }
425 }
426
427 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
428
429 void
430 ipa_print_all_jump_functions (FILE *f)
431 {
432 struct cgraph_node *node;
433
434 fprintf (f, "\nJump functions:\n");
435 FOR_EACH_FUNCTION (node)
436 {
437 ipa_print_node_jump_functions (f, node);
438 }
439 }
440
441 /* Set JFUNC to be a known type jump function. */
442
443 static void
444 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
445 tree base_type, tree component_type)
446 {
447 /* Recording and propagating main variants increases change that types
448 will match. */
449 base_type = TYPE_MAIN_VARIANT (base_type);
450 component_type = TYPE_MAIN_VARIANT (component_type);
451
452 gcc_assert (contains_polymorphic_type_p (base_type)
453 && contains_polymorphic_type_p (component_type));
454 if (!flag_devirtualize)
455 return;
456 jfunc->type = IPA_JF_KNOWN_TYPE;
457 jfunc->value.known_type.offset = offset,
458 jfunc->value.known_type.base_type = base_type;
459 jfunc->value.known_type.component_type = component_type;
460 gcc_assert (component_type);
461 }
462
463 /* Set JFUNC to be a copy of another jmp (to be used by jump function
464 combination code). The two functions will share their rdesc. */
465
466 static void
467 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
468 struct ipa_jump_func *src)
469
470 {
471 gcc_checking_assert (src->type == IPA_JF_CONST);
472 dst->type = IPA_JF_CONST;
473 dst->value.constant = src->value.constant;
474 }
475
/* Set JFUNC to be a constant jump function with value CONSTANT, taken at call
   graph edge CS.  For addresses of functions, also allocates a reference
   descriptor so that the reference can be tracked and removed when the
   constant is propagated.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  /* NOTE(review): CONSTANT is unshared here and then again by
     unshare_expr_without_location below — the first unshare looks
     redundant; confirm before removing.  */
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      /* The value is an address of a function; record where the reference
	 was taken so it can be updated when edges are redirected/cloned.  */
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
505
506 /* Set JFUNC to be a simple pass-through jump function. */
507 static void
508 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
509 bool agg_preserved, bool type_preserved)
510 {
511 jfunc->type = IPA_JF_PASS_THROUGH;
512 jfunc->value.pass_through.operand = NULL_TREE;
513 jfunc->value.pass_through.formal_id = formal_id;
514 jfunc->value.pass_through.operation = NOP_EXPR;
515 jfunc->value.pass_through.agg_preserved = agg_preserved;
516 jfunc->value.pass_through.type_preserved = type_preserved;
517 }
518
519 /* Set JFUNC to be an arithmetic pass through jump function. */
520
521 static void
522 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
523 tree operand, enum tree_code operation)
524 {
525 jfunc->type = IPA_JF_PASS_THROUGH;
526 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
527 jfunc->value.pass_through.formal_id = formal_id;
528 jfunc->value.pass_through.operation = operation;
529 jfunc->value.pass_through.agg_preserved = false;
530 jfunc->value.pass_through.type_preserved = false;
531 }
532
533 /* Set JFUNC to be an ancestor jump function. */
534
535 static void
536 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
537 tree type, int formal_id, bool agg_preserved,
538 bool type_preserved)
539 {
540 if (!flag_devirtualize)
541 type_preserved = false;
542 if (!type_preserved)
543 type = NULL_TREE;
544 if (type)
545 type = TYPE_MAIN_VARIANT (type);
546 gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
547 jfunc->type = IPA_JF_ANCESTOR;
548 jfunc->value.ancestor.formal_id = formal_id;
549 jfunc->value.ancestor.offset = offset;
550 jfunc->value.ancestor.type = type_preserved ? type : NULL;
551 jfunc->value.ancestor.agg_preserved = agg_preserved;
552 jfunc->value.ancestor.type_preserved = type_preserved;
553 }
554
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  Returns NULL_TREE when the binfo cannot be determined.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
	  || !TYPE_BINFO (jfunc->value.known_type.component_type)
	  || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      /* At offset zero the base binfo itself describes the object.  */
      if (!jfunc->value.known_type.offset)
	return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
585
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  The returned structure lives in FBI's bb_infos
   vector, indexed by basic block index.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
595
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change (as the walk_aliased_vdefs DATA argument).  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
616
617 /* Return true if STMT can modify a virtual method table pointer.
618
619 This function makes special assumptions about both constructors and
620 destructors which are all the functions that are allowed to alter the VMT
621 pointers. It assumes that destructors begin with assignment into all VMT
622 pointers and that constructors essentially look in the following way:
623
624 1) The very first thing they do is that they call constructors of ancestor
625 sub-objects that have them.
626
627 2) Then VMT pointers of this and all its ancestors is set to new values
628 corresponding to the type corresponding to the constructor.
629
630 3) Only afterwards, other stuff such as constructor of member sub-objects
631 and the code written by the user is run. Only this may include calling
632 virtual functions, directly or indirectly.
633
634 There is no way to call a constructor of an ancestor sub-object in any
635 other way.
636
637 This means that we do not have to care whether constructors get the correct
638 type information because they will always change it (in fact, if we define
639 the type to be given by the VMT pointer, it is undefined).
640
641 The most important fact to derive from the above is that if, for some
642 statement in the section 3, we try to detect whether the dynamic type has
643 changed, we can safely ignore all calls as we examine the function body
644 backwards until we reach statements in section 2 because these calls cannot
645 be ancestor constructors or destructors (if the input is not bogus) and so
646 do not change the dynamic type (this holds true only for automatically
647 allocated objects but at the moment we devirtualize only these). We then
648 must detect that statements in section 2 change the dynamic type and can try
649 to derive the new type. That is enough and we can stop, we will never see
650 the calls into constructors of sub-objects in this code. Therefore we can
651 safely ignore all call statements that we traverse.
652 */
653
654 static bool
655 stmt_may_be_vtbl_ptr_store (gimple stmt)
656 {
657 if (is_gimple_call (stmt))
658 return false;
659 if (gimple_clobber_p (stmt))
660 return false;
661 else if (is_gimple_assign (stmt))
662 {
663 tree lhs = gimple_assign_lhs (stmt);
664
665 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
666 {
667 if (flag_strict_aliasing
668 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
669 return false;
670
671 if (TREE_CODE (lhs) == COMPONENT_REF
672 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
673 return false;
674 /* In the future we might want to use get_base_ref_and_offset to find
675 if there is a field corresponding to the offset and if so, proceed
676 almost like if it was a component ref. */
677 }
678 }
679 return true;
680 }
681
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  /* Only stores through the virtual-pointer field qualify.  */
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  /* The store must be exactly at TCI's offset and exactly pointer-sized.  */
  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* For MEM_REF bases, both the pointer and the constant offset must
	 match the tracked object.  */
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}
728
729 /* Callback of walk_aliased_vdefs and a helper function for
730 detect_type_change to check whether a particular statement may modify
731 the virtual table pointer, and if possible also determine the new type of
732 the (sub-)object. It stores its result into DATA, which points to a
733 prop_type_change_info structure. */
734
735 static bool
736 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
737 {
738 gimple stmt = SSA_NAME_DEF_STMT (vdef);
739 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
740
741 if (stmt_may_be_vtbl_ptr_store (stmt))
742 {
743 tree type;
744
745 type = extr_type_from_vtbl_ptr_store (stmt, tci);
746 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
747 if (tci->type_maybe_changed
748 && type != tci->known_current_type)
749 tci->multiple_types_encountered = true;
750 tci->known_current_type = type;
751 tci->type_maybe_changed = true;
752 return true;
753 }
754 else
755 return false;
756 }
757
/* See if ARG is PARAM_DECl describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		/* Pure/const inline frames cannot write memory; skip.  */
		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}
821
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gimple call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Set up an ao_ref describing just the pointer-sized slot at OFFSET where
     the virtual table pointer would live.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  /* Walk virtual definitions backwards from CALL looking for possible vtable
     pointer stores; check_stmt_for_type_change accumulates into TCI.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
	 is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
886
887 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
888 If it is, return true and fill in the jump function JFUNC with relevant type
889 information or set it to unknown. ARG is the object itself (not a pointer
890 to it, unless dereferenced). BASE is the base of the memory access as
891 returned by get_ref_base_and_extent, as is the offset. */
892
893 static bool
894 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
895 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
896 {
897 if (!flag_devirtualize)
898 return false;
899
900 if (TREE_CODE (base) == MEM_REF
901 && !param_type_may_change_p (current_function_decl,
902 TREE_OPERAND (base, 0),
903 call))
904 return false;
905 return detect_type_change_from_memory_writes (arg, base, comp_type,
906 call, jfunc, offset);
907 }
908
909 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
910 SSA name (its dereference will become the base and the offset is assumed to
911 be zero). */
912
913 static bool
914 detect_type_change_ssa (tree arg, tree comp_type,
915 gimple call, struct ipa_jump_func *jfunc)
916 {
917 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
918 if (!flag_devirtualize
919 || !POINTER_TYPE_P (TREE_TYPE (arg)))
920 return false;
921
922 if (!param_type_may_change_p (current_function_decl, arg, call))
923 return false;
924
925 arg = build2 (MEM_REF, ptr_type_node, arg,
926 build_int_cst (ptr_type_node, 0));
927
928 return detect_type_change_from_memory_writes (arg, arg, comp_type,
929 call, jfunc, 0);
930 }
931
932 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
933 boolean variable pointed to by DATA. */
934
935 static bool
936 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
937 void *data)
938 {
939 bool *b = (bool *) data;
940 *b = true;
941 return true;
942 }
943
944 /* Return true if we have already walked so many statements in AA that we
945 should really just start giving up. */
946
947 static bool
948 aa_overwalked (struct func_body_info *fbi)
949 {
950 gcc_checking_assert (fbi);
951 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
952 }
953
954 /* Find the nearest valid aa status for parameter specified by INDEX that
955 dominates BB. */
956
957 static struct param_aa_status *
958 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
959 int index)
960 {
961 while (true)
962 {
963 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
964 if (!bb)
965 return NULL;
966 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
967 if (!bi->param_aa_statuses.is_empty ()
968 && bi->param_aa_statuses[index].valid)
969 return &bi->param_aa_statuses[index];
970 }
971 }
972
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector on first query.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A cleared (invalid) entry must not carry any cached results.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed this BB's status from the nearest dominator that has one, so
	 results cached there are inherited here.  */
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
1001
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      /* A cached (possibly dominator-inherited) negative answer.  */
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* Walk the virtual-definition chain; mark_modified sets MODIFIED at the
     first aliasing store and terminates the walk.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  /* Cache a negative result so that dominated blocks need not re-walk.  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
1041
1042 /* If STMT is an assignment that loads a value from an parameter declaration,
1043 return the index of the parameter in ipa_node_params which has not been
1044 modified. Otherwise return -1. */
1045
1046 static int
1047 load_from_unmodified_param (struct func_body_info *fbi,
1048 vec<ipa_param_descriptor> descriptors,
1049 gimple stmt)
1050 {
1051 int index;
1052 tree op1;
1053
1054 if (!gimple_assign_single_p (stmt))
1055 return -1;
1056
1057 op1 = gimple_assign_rhs1 (stmt);
1058 if (TREE_CODE (op1) != PARM_DECL)
1059 return -1;
1060
1061 index = ipa_get_param_decl_index_1 (descriptors, op1);
1062 if (index < 0
1063 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1064 return -1;
1065
1066 return index;
1067 }
1068
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      /* A cached (possibly dominator-inherited) negative answer.  */
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  /* Walk the virtual-definition chain; mark_modified sets MODIFIED at the
     first aliasing store and terminates the walk.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  /* Cache a negative result so that dominated blocks need not re-walk.  */
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
1107
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  /* A cached (possibly dominator-inherited) negative answer.  */
  if (paa->pt_modified)
    return false;

  /* NULL_TREE size means the whole object reachable through the pointer is
     considered.  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  /* Cache a negative result so that dominated blocks need not re-walk.  */
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1140
1141 /* Return true if we can prove that OP is a memory reference loading unmodified
1142 data from an aggregate passed as a parameter and if the aggregate is passed
1143 by reference, that the alias type of the load corresponds to the type of the
1144 formal parameter (so that we can rely on this type for TBAA in callers).
1145 INFO and PARMS_AINFO describe parameters of the current function (but the
1146 latter can be NULL), STMT is the load statement. If function returns true,
1147 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1148 within the aggregate and whether it is a load from a value passed by
1149 reference respectively. */
1150
1151 static bool
1152 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1153 vec<ipa_param_descriptor> descriptors,
1154 gimple stmt, tree op, int *index_p,
1155 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1156 bool *by_ref_p)
1157 {
1158 int index;
1159 HOST_WIDE_INT size, max_size;
1160 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1161
1162 if (max_size == -1 || max_size != size || *offset_p < 0)
1163 return false;
1164
1165 if (DECL_P (base))
1166 {
1167 int index = ipa_get_param_decl_index_1 (descriptors, base);
1168 if (index >= 0
1169 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1170 {
1171 *index_p = index;
1172 *by_ref_p = false;
1173 if (size_p)
1174 *size_p = size;
1175 return true;
1176 }
1177 return false;
1178 }
1179
1180 if (TREE_CODE (base) != MEM_REF
1181 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1182 || !integer_zerop (TREE_OPERAND (base, 1)))
1183 return false;
1184
1185 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1186 {
1187 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1188 index = ipa_get_param_decl_index_1 (descriptors, parm);
1189 }
1190 else
1191 {
1192 /* This branch catches situations where a pointer parameter is not a
1193 gimple register, for example:
1194
1195 void hip7(S*) (struct S * p)
1196 {
1197 void (*<T2e4>) (struct S *) D.1867;
1198 struct S * p.1;
1199
1200 <bb 2>:
1201 p.1_1 = p;
1202 D.1867_2 = p.1_1->f;
1203 D.1867_2 ();
1204 gdp = &p;
1205 */
1206
1207 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1208 index = load_from_unmodified_param (fbi, descriptors, def);
1209 }
1210
1211 if (index >= 0
1212 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1213 {
1214 *index_p = index;
1215 *by_ref_p = true;
1216 if (size_p)
1217 *size_p = size;
1218 return true;
1219 }
1220 return false;
1221 }
1222
/* Just like the previous function, but without the func_body_info and
   parameter-descriptor arguments; intended for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  /* Passing a NULL FBI is tolerated by parm_preserved_before_stmt_p and
     parm_ref_data_preserved_p; the alias walks are just not cached then.  */
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
1234
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO describes the caller's parameters at different stages of IPA
   optimizations.  FBI holds intraprocedural analysis state that does not
   survive the summary building stage.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* First try to recognize cases 1 and 2: the RHS is (derived from) a load
     of an unmodified parameter.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Case 2: arithmetic pass-through.  The second operand must be an
	     interprocedural invariant, and for non-comparisons the types must
	     match so the operation has the same meaning in the callee.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  /* Case 1: simple pass-through.  */
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  /* Case 3: taking the address of a component of a parameter.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
		     && !detect_type_change (op1, base, TREE_TYPE (param_type),
					     call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (fbi, index,
							   call, ssa), type_p);
    }
}
1377
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  /* Strip the ADDR_EXPR; OBJ keeps the full referenced object.  */
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Fold the MEM_REF's own constant offset into the total.  */
  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1421
1422
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the zero pointer, the other the cast result.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* The non-zero argument must be defined by the &obj->field assignment
     in a block with a single predecessor (the condition block).  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The predecessor must end with exactly "if (parm != 0)".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must come from the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
			 index,
			 parm_ref_data_pass_through_p (fbi, index, call, parm),
			 type_p);
}
1511
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* Only the address of a polymorphic object is of interest.  */
  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  /* The base must be a declaration with a fixed-size access.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
				    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
	 we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
	  || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}
1551
1552 /* Inspect the given TYPE and return true iff it has the same structure (the
1553 same number of fields of the same types) as a C++ member pointer. If
1554 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1555 corresponding fields there. */
1556
1557 static bool
1558 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1559 {
1560 tree fld;
1561
1562 if (TREE_CODE (type) != RECORD_TYPE)
1563 return false;
1564
1565 fld = TYPE_FIELDS (type);
1566 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1567 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1568 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1569 return false;
1570
1571 if (method_ptr)
1572 *method_ptr = fld;
1573
1574 fld = DECL_CHAIN (fld);
1575 if (!fld || INTEGRAL_TYPE_P (fld)
1576 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1577 return false;
1578 if (delta)
1579 *delta = fld;
1580
1581 if (DECL_CHAIN (fld))
1582 return false;
1583
1584 return true;
1585 }
1586
1587 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1588 return the rhs of its defining statement. Otherwise return RHS as it
1589 is. */
1590
1591 static inline tree
1592 get_ssa_def_if_simple_copy (tree rhs)
1593 {
1594 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1595 {
1596 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1597
1598 if (gimple_assign_single_p (def_stmt))
1599 rhs = gimple_assign_rhs1 (def_stmt);
1600 else
1601 break;
1602 }
1603 return rhs;
1604 }
1605
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
1618
1619 /* Find the proper place in linked list of ipa_known_agg_contents_list
1620 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1621 unless there is a partial overlap, in which case return NULL, or such
1622 element is already there, in which case set *ALREADY_THERE to true. */
1623
1624 static struct ipa_known_agg_contents_list **
1625 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1626 HOST_WIDE_INT lhs_offset,
1627 HOST_WIDE_INT lhs_size,
1628 bool *already_there)
1629 {
1630 struct ipa_known_agg_contents_list **p = list;
1631 while (*p && (*p)->offset < lhs_offset)
1632 {
1633 if ((*p)->offset + (*p)->size > lhs_offset)
1634 return NULL;
1635 p = &(*p)->next;
1636 }
1637
1638 if (*p && (*p)->offset < lhs_offset + lhs_size)
1639 {
1640 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1641 /* We already know this value is subsequently overwritten with
1642 something else. */
1643 *already_there = true;
1644 else
1645 /* Otherwise this is a partial overlap which we cannot
1646 represent. */
1647 return NULL;
1648 }
1649 return p;
1650 }
1651
1652 /* Build aggregate jump function from LIST, assuming there are exactly
1653 CONST_COUNT constant entries there and that th offset of the passed argument
1654 is ARG_OFFSET and store it into JFUNC. */
1655
1656 static void
1657 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1658 int const_count, HOST_WIDE_INT arg_offset,
1659 struct ipa_jump_func *jfunc)
1660 {
1661 vec_alloc (jfunc->agg.items, const_count);
1662 while (list)
1663 {
1664 if (list->constant)
1665 {
1666 struct ipa_agg_jf_item item;
1667 item.offset = list->offset - arg_offset;
1668 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1669 item.value = unshare_expr_without_location (list->constant);
1670 jfunc->agg.items->quick_push (item);
1671 }
1672 list = list->next;
1673 }
1674 }
1675
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  /* The pointed-to type must have a known constant size.  */
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      /* Statements that cannot touch the tracked memory are irrelevant.  */
      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      /* Any other kind of clobber makes further tracking unreliable.  */
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  /* Stores must go through the very pointer we track, at offset
	     zero within the MEM_REF.  */
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  /* A store to a different declaration cannot affect our aggregate;
	     anything else might.  */
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      /* Bound the amount of work done on one call site.  */
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
1837
/* Return the declared type of the formal parameter number I of the function
   called by edge E, or NULL when it cannot be determined.  TYPE_ARG_TYPES of
   the callee's type (or of the call statement's function type when there is
   no callee node) is consulted first, with DECL_ARGUMENTS of the callee
   declaration as a fallback for unprototyped functions.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  /* Advance to the I-th entry of the argument type list.  */
  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  /* Fall back to walking the actual argument declarations.  */
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
1868
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if already computed.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      /* For pointer arguments, additionally record a polymorphic call
	 context when devirtualization is enabled.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* The argument is a parameter passed through unchanged.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      /* The argument is computed; try the complex patterns.  */
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      /* If ARG is pointer, we can not use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      /* Unless aggregate contents are already known to be preserved, try to
	 determine the locally known parts of the aggregate.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
1985
1986 /* Compute jump functions for all edges - both direct and indirect - outgoing
1987 from BB. */
1988
1989 static void
1990 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1991 {
1992 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1993 int i;
1994 struct cgraph_edge *cs;
1995
1996 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1997 {
1998 struct cgraph_node *callee = cs->callee;
1999
2000 if (callee)
2001 {
2002 callee->ultimate_alias_target ();
2003 /* We do not need to bother analyzing calls to unknown functions
2004 unless they may become known during lto/whopr. */
2005 if (!callee->definition && !flag_lto)
2006 continue;
2007 }
2008 ipa_compute_jump_functions_for_edge (fbi, cs);
2009 }
2010 }
2011
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  /* Only plain single-rhs loads can match the pattern.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The load is either a COMPONENT_REF of the field within a MEM_REF of
     the parameter (f.__pfn form), or a MEM_REF at a constant offset from
     the parameter's address (MEM[&f + 4B] form).  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type has the member-pointer layout
     (a pfn field and a delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the
	 referenced field must be the one we look for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* MEM_REF form: the constant offset must match the field's byte
       position within the member pointer structure.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
							: NULL_TREE;
}
2063
2064 /* Returns true iff T is an SSA_NAME defined by a statement. */
2065
2066 static bool
2067 ipa_is_ssa_with_stmt_def (tree t)
2068 {
2069 if (TREE_CODE (t) == SSA_NAME
2070 && !SSA_NAME_IS_DEFAULT_DEF (t))
2071 return true;
2072 else
2073 return false;
2074 }
2075
2076 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2077 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2078 indirect call graph edge. */
2079
2080 static struct cgraph_edge *
2081 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2082 {
2083 struct cgraph_edge *cs;
2084
2085 cs = node->get_edge (stmt);
2086 cs->indirect_info->param_index = param_index;
2087 cs->indirect_info->agg_contents = 0;
2088 cs->indirect_info->member_ptr = 0;
2089 return cs;
2090 }
2091
2092 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2093 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2094 intermediate information about each formal parameter. Currently it checks
2095 whether the call calls a pointer that is a formal parameter and if so, the
2096 parameter is marked with the called flag and an indirect call graph edge
2097 describing the call is created. This is very simple for ordinary pointers
2098 represented in SSA but not-so-nice when it comes to member pointers. The
2099 ugly part of this function does nothing more than trying to match the
2100 pattern of such a call. An example of such a pattern is the gimple dump
2101 below, the call is on the last line:
2102
2103 <bb 2>:
2104 f$__delta_5 = f.__delta;
2105 f$__pfn_24 = f.__pfn;
2106
2107 or
2108 <bb 2>:
2109 f$__delta_5 = MEM[(struct *)&f];
2110 f$__pfn_24 = MEM[(struct *)&f + 4B];
2111
2112 and a few lines below:
2113
2114 <bb 5>
2115 D.2496_3 = (int) f$__pfn_24;
2116 D.2497_4 = D.2496_3 & 1;
2117 if (D.2497_4 != 0)
2118 goto <bb 3>;
2119 else
2120 goto <bb 4>;
2121
2122 <bb 6>:
2123 D.2500_7 = (unsigned int) f$__delta_5;
2124 D.2501_8 = &S + D.2500_7;
2125 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2126 D.2503_10 = *D.2502_9;
2127 D.2504_12 = f$__pfn_24 + -1;
2128 D.2505_13 = (unsigned int) D.2504_12;
2129 D.2506_14 = D.2503_10 + D.2505_13;
2130 D.2507_15 = *D.2506_14;
2131 iftmp.11_16 = (String:: *) D.2507_15;
2132
2133 <bb 7>:
2134 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2135 D.2500_19 = (unsigned int) f$__delta_5;
2136 D.2508_20 = &S + D.2500_19;
2137 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2138
2139 Such patterns are results of simple calls to a member pointer:
2140
2141 int doprinting (int (MyString::* f)(int) const)
2142 {
2143 MyString S ("somestring");
2144
2145 return (S.*f)(4);
2146 }
2147
2148 Moreover, the function also looks for called pointers loaded from aggregates
2149 passed by value or reference. */
2150
static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Simple case: the called SSA name is the incoming value of a formal
     parameter itself.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  /* Next case: the called pointer is loaded from an aggregate that is a
     parameter passed by value or by reference.  */
  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer (see the big comment above this function).  The call target
     must be a two-argument PHI of method-pointer type.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one PHI argument may be the direct pfn load; the other
	 must come from the virtual-lookup block.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern: BB conditionally branches either to
     VIRT_BB (the vtable lookup) or straight to the join block.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be pfn & 1 (possibly with a conversion of the pfn
     in between), which tests the "is virtual" bit.  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member pointer parameter
     REC found above; depending on the target ABI the virtual bit lives in
     the pfn or the delta field.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
2283
2284 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2285 object referenced in the expression is a formal parameter of the caller
2286 FBI->node (described by FBI->info), create a call note for the
2287 statement. */
2288
2289 static void
2290 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2291 gimple call, tree target)
2292 {
2293 tree obj = OBJ_TYPE_REF_OBJECT (target);
2294 int index;
2295 HOST_WIDE_INT anc_offset;
2296
2297 if (!flag_devirtualize)
2298 return;
2299
2300 if (TREE_CODE (obj) != SSA_NAME)
2301 return;
2302
2303 struct ipa_node_params *info = fbi->info;
2304 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2305 {
2306 struct ipa_jump_func jfunc;
2307 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2308 return;
2309
2310 anc_offset = 0;
2311 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2312 gcc_assert (index >= 0);
2313 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2314 call, &jfunc))
2315 return;
2316 }
2317 else
2318 {
2319 struct ipa_jump_func jfunc;
2320 gimple stmt = SSA_NAME_DEF_STMT (obj);
2321 tree expr;
2322
2323 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2324 if (!expr)
2325 return;
2326 index = ipa_get_param_decl_index (info,
2327 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2328 gcc_assert (index >= 0);
2329 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2330 call, &jfunc, anc_offset))
2331 return;
2332 }
2333
2334 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2335 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2336 ii->offset = anc_offset;
2337 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2338 ii->otr_type = obj_type_ref_class (target);
2339 ii->polymorphic = 1;
2340 }
2341
2342 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2343 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2344 containing intermediate information about each formal parameter. */
2345
2346 static void
2347 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2348 {
2349 tree target = gimple_call_fn (call);
2350
2351 if (!target
2352 || (TREE_CODE (target) != SSA_NAME
2353 && !virtual_method_call_p (target)))
2354 return;
2355
2356 struct cgraph_edge *cs = fbi->node->get_edge (call);
2357 /* If we previously turned the call into a direct call, there is
2358 no need to analyze. */
2359 if (cs && !cs->indirect_unknown_callee)
2360 return;
2361
2362 if (cs->indirect_info->polymorphic)
2363 {
2364 tree instance;
2365 tree target = gimple_call_fn (call);
2366 ipa_polymorphic_call_context context (current_function_decl,
2367 target, call, &instance);
2368
2369 gcc_checking_assert (cs->indirect_info->otr_type
2370 == obj_type_ref_class (target));
2371 gcc_checking_assert (cs->indirect_info->otr_token
2372 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2373
2374 cs->indirect_info->vptr_changed
2375 = !context.get_dynamic_type (instance,
2376 OBJ_TYPE_REF_OBJECT (target),
2377 obj_type_ref_class (target), call);
2378 cs->indirect_info->context = context;
2379 }
2380
2381 if (TREE_CODE (target) == SSA_NAME)
2382 ipa_analyze_indirect_call_uses (fbi, call, target);
2383 else if (virtual_method_call_p (target))
2384 ipa_analyze_virtual_call_uses (fbi, call, target);
2385 }
2386
2387
2388 /* Analyze the call statement STMT with respect to formal parameters (described
2389 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2390 formal parameters are called. */
2391
2392 static void
2393 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2394 {
2395 if (is_gimple_call (stmt))
2396 ipa_analyze_call_uses (fbi, stmt);
2397 }
2398
2399 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2400 If OP is a parameter declaration, mark it as used in the info structure
2401 passed in DATA. */
2402
2403 static bool
2404 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2405 {
2406 struct ipa_node_params *info = (struct ipa_node_params *) data;
2407
2408 op = get_base_address (op);
2409 if (op
2410 && TREE_CODE (op) == PARM_DECL)
2411 {
2412 int index = ipa_get_param_decl_index (info, op);
2413 gcc_assert (index >= 0);
2414 ipa_set_param_used (info, index, true);
2415 }
2416
2417 return false;
2418 }
2419
2420 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2421 the findings in various structures of the associated ipa_node_params
2422 structure, such as parameter flags, notes etc. FBI holds various data about
2423 the function being analyzed. */
2424
2425 static void
2426 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2427 {
2428 gimple_stmt_iterator gsi;
2429 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2430 {
2431 gimple stmt = gsi_stmt (gsi);
2432
2433 if (is_gimple_debug (stmt))
2434 continue;
2435
2436 ipa_analyze_stmt_uses (fbi, stmt);
2437 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2438 visit_ref_for_mod_analysis,
2439 visit_ref_for_mod_analysis,
2440 visit_ref_for_mod_analysis);
2441 }
2442 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2443 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2444 visit_ref_for_mod_analysis,
2445 visit_ref_for_mod_analysis,
2446 visit_ref_for_mod_analysis);
2447 }
2448
/* Calculate controlled uses of parameters of NODE.  A "controlled" use is a
   use as an actual argument of a call; any other non-debug use makes the
   parameter's uses undescribable (IPA_UNDESCRIBED_USE).  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count call uses; a single non-call, non-debug use makes the
		 parameter undescribable and terminates the scan.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    /* Unused parameter trivially has zero controlled uses.  */
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
2493
2494 /* Free stuff in BI. */
2495
2496 static void
2497 free_ipa_bb_info (struct ipa_bb_info *bi)
2498 {
2499 bi->cg_edges.release ();
2500 bi->param_aa_statuses.release ();
2501 }
2502
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  /* Data about the analyzed function body, shared by all callbacks.  */
  struct func_body_info *m_fbi;
};

/* Process BB: analyze parameter uses first so that the gathered information
   is available when jump functions for calls in the same block are
   computed.  */

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
2523
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  /* The analysis is done at most once per node.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      /* When options forbid the analysis, pessimistically mark every
	 parameter as used with undescribable uses.  */
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  /* Distribute outgoing call graph edges to the basic blocks of their call
     statements so the dominator walk can process them locally.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Release all per-BB data gathered during the walk.  */
  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
2587
2588 /* Update the jump function DST when the call graph edge corresponding to SRC is
2589 is being inlined, knowing that DST is of type ancestor and src of known
2590 type. */
2591
2592 static void
2593 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2594 struct ipa_jump_func *dst)
2595 {
2596 HOST_WIDE_INT combined_offset;
2597 tree combined_type;
2598
2599 if (!ipa_get_jf_ancestor_type_preserved (dst))
2600 {
2601 dst->type = IPA_JF_UNKNOWN;
2602 return;
2603 }
2604
2605 combined_offset = ipa_get_jf_known_type_offset (src)
2606 + ipa_get_jf_ancestor_offset (dst);
2607 combined_type = ipa_get_jf_ancestor_type (dst);
2608
2609 ipa_set_jf_known_type (dst, combined_offset,
2610 ipa_get_jf_known_type_base_type (src),
2611 combined_type);
2612 }
2613
2614 /* Update the jump functions associated with call graph edge E when the call
2615 graph edge CS is being inlined, assuming that E->caller is already (possibly
2616 indirectly) inlined into CS->callee and that E has not been inlined. */
2617
2618 static void
2619 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2620 struct cgraph_edge *e)
2621 {
2622 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2623 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2624 int count = ipa_get_cs_argument_count (args);
2625 int i;
2626
2627 for (i = 0; i < count; i++)
2628 {
2629 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2630 struct ipa_polymorphic_call_context *dst_ctx
2631 = ipa_get_ith_polymorhic_call_context (args, i);
2632
2633 if (dst->type == IPA_JF_ANCESTOR)
2634 {
2635 struct ipa_jump_func *src;
2636 int dst_fid = dst->value.ancestor.formal_id;
2637 struct ipa_polymorphic_call_context *src_ctx
2638 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2639
2640 /* Variable number of arguments can cause havoc if we try to access
2641 one that does not exist in the inlined edge. So make sure we
2642 don't. */
2643 if (dst_fid >= ipa_get_cs_argument_count (top))
2644 {
2645 dst->type = IPA_JF_UNKNOWN;
2646 continue;
2647 }
2648
2649 src = ipa_get_ith_jump_func (top, dst_fid);
2650
2651 if (src_ctx && !src_ctx->useless_p ())
2652 {
2653 struct ipa_polymorphic_call_context ctx = *src_ctx;
2654
2655 /* TODO: Make type preserved safe WRT contexts. */
2656 if (!dst->value.ancestor.agg_preserved)
2657 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2658 ctx.offset_by (dst->value.ancestor.offset);
2659 if (!ctx.useless_p ())
2660 {
2661 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2662 count);
2663 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2664 }
2665 }
2666
2667 if (src->agg.items
2668 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2669 {
2670 struct ipa_agg_jf_item *item;
2671 int j;
2672
2673 /* Currently we do not produce clobber aggregate jump functions,
2674 replace with merging when we do. */
2675 gcc_assert (!dst->agg.items);
2676
2677 dst->agg.items = vec_safe_copy (src->agg.items);
2678 dst->agg.by_ref = src->agg.by_ref;
2679 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2680 item->offset -= dst->value.ancestor.offset;
2681 }
2682
2683 if (src->type == IPA_JF_KNOWN_TYPE)
2684 combine_known_type_and_ancestor_jfs (src, dst);
2685 else if (src->type == IPA_JF_PASS_THROUGH
2686 && src->value.pass_through.operation == NOP_EXPR)
2687 {
2688 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2689 dst->value.ancestor.agg_preserved &=
2690 src->value.pass_through.agg_preserved;
2691 dst->value.ancestor.type_preserved &=
2692 src->value.pass_through.type_preserved;
2693 }
2694 else if (src->type == IPA_JF_ANCESTOR)
2695 {
2696 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2697 dst->value.ancestor.offset += src->value.ancestor.offset;
2698 dst->value.ancestor.agg_preserved &=
2699 src->value.ancestor.agg_preserved;
2700 dst->value.ancestor.type_preserved &=
2701 src->value.ancestor.type_preserved;
2702 }
2703 else
2704 dst->type = IPA_JF_UNKNOWN;
2705 }
2706 else if (dst->type == IPA_JF_PASS_THROUGH)
2707 {
2708 struct ipa_jump_func *src;
2709 /* We must check range due to calls with variable number of arguments
2710 and we cannot combine jump functions with operations. */
2711 if (dst->value.pass_through.operation == NOP_EXPR
2712 && (dst->value.pass_through.formal_id
2713 < ipa_get_cs_argument_count (top)))
2714 {
2715 int dst_fid = dst->value.pass_through.formal_id;
2716 src = ipa_get_ith_jump_func (top, dst_fid);
2717 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2718 struct ipa_polymorphic_call_context *src_ctx
2719 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2720
2721 if (src_ctx && !src_ctx->useless_p ())
2722 {
2723 struct ipa_polymorphic_call_context ctx = *src_ctx;
2724
2725 /* TODO: Make type preserved safe WRT contexts. */
2726 if (!dst->value.ancestor.agg_preserved)
2727 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2728 if (!ctx.useless_p ())
2729 {
2730 if (!dst_ctx)
2731 {
2732 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2733 count);
2734 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2735 }
2736 dst_ctx->combine_with (ctx);
2737 }
2738 }
2739 switch (src->type)
2740 {
2741 case IPA_JF_UNKNOWN:
2742 dst->type = IPA_JF_UNKNOWN;
2743 break;
2744 case IPA_JF_KNOWN_TYPE:
2745 if (ipa_get_jf_pass_through_type_preserved (dst))
2746 ipa_set_jf_known_type (dst,
2747 ipa_get_jf_known_type_offset (src),
2748 ipa_get_jf_known_type_base_type (src),
2749 ipa_get_jf_known_type_component_type (src));
2750 else
2751 dst->type = IPA_JF_UNKNOWN;
2752 break;
2753 case IPA_JF_CONST:
2754 ipa_set_jf_cst_copy (dst, src);
2755 break;
2756
2757 case IPA_JF_PASS_THROUGH:
2758 {
2759 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2760 enum tree_code operation;
2761 operation = ipa_get_jf_pass_through_operation (src);
2762
2763 if (operation == NOP_EXPR)
2764 {
2765 bool agg_p, type_p;
2766 agg_p = dst_agg_p
2767 && ipa_get_jf_pass_through_agg_preserved (src);
2768 type_p = ipa_get_jf_pass_through_type_preserved (src)
2769 && ipa_get_jf_pass_through_type_preserved (dst);
2770 ipa_set_jf_simple_pass_through (dst, formal_id,
2771 agg_p, type_p);
2772 }
2773 else
2774 {
2775 tree operand = ipa_get_jf_pass_through_operand (src);
2776 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2777 operation);
2778 }
2779 break;
2780 }
2781 case IPA_JF_ANCESTOR:
2782 {
2783 bool agg_p, type_p;
2784 agg_p = dst_agg_p
2785 && ipa_get_jf_ancestor_agg_preserved (src);
2786 type_p = ipa_get_jf_ancestor_type_preserved (src)
2787 && ipa_get_jf_pass_through_type_preserved (dst);
2788 ipa_set_ancestor_jf (dst,
2789 ipa_get_jf_ancestor_offset (src),
2790 ipa_get_jf_ancestor_type (src),
2791 ipa_get_jf_ancestor_formal_id (src),
2792 agg_p, type_p);
2793 break;
2794 }
2795 default:
2796 gcc_unreachable ();
2797 }
2798
2799 if (src->agg.items
2800 && (dst_agg_p || !src->agg.by_ref))
2801 {
2802 /* Currently we do not produce clobber aggregate jump
2803 functions, replace with merging when we do. */
2804 gcc_assert (!dst->agg.items);
2805
2806 dst->agg.by_ref = src->agg.by_ref;
2807 dst->agg.items = vec_safe_copy (src->agg.items);
2808 }
2809 }
2810 else
2811 dst->type = IPA_JF_UNKNOWN;
2812 }
2813 }
2814 }
2815
/* If TARGET is an addr_expr of a function declaration, make it the
   (speculative) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  /* Strip the address operator and, if needed, look through constructor
     values to get at the function declaration.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  if (ie->indirect_info->member_ptr)
	    /* Member pointer call that goes through a VMT lookup.  */
	    return NULL;

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  /* Calling a non-function is undefined behavior; redirect the
	     call to __builtin_unreachable.  */
	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      /* Give up when the existing speculation disagrees with the newly
	 discovered target; otherwise just report the agreement.  */
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
		     xstrdup (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup (callee->name ()),
		     callee->order,
		     xstrdup (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) this agree with previous speculation.\n",
		     xstrdup (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    ie = ie->make_direct (callee);
  else
    {
      /* A speculative edge must not point to a node inlining could
	 discard; prefer a non-interposable alias when one exists.  */
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* Attribute 80% of count and frequency to the speculative direct
	 edge.  */
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }
  /* A direct call is cheaper than an indirect one; update the size/time
     estimates of the edge accordingly.  */
  es = inline_edge_summary (ie);
  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
			 - eni_size_weights.call_cost);
  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
			 - eni_time_weights.call_cost);

  return ie;
}
2968
2969 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2970 return NULL if there is not any. BY_REF specifies whether the value has to
2971 be passed by reference or by value. */
2972
2973 tree
2974 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2975 HOST_WIDE_INT offset, bool by_ref)
2976 {
2977 struct ipa_agg_jf_item *item;
2978 int i;
2979
2980 if (by_ref != agg->by_ref)
2981 return NULL;
2982
2983 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2984 if (item->offset == offset)
2985 {
2986 /* Currently we do not have clobber values, return NULL for them once
2987 we do. */
2988 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2989 return item->value;
2990 }
2991 return NULL;
2992 }
2993
2994 /* Remove a reference to SYMBOL from the list of references of a node given by
2995 reference description RDESC. Return true if the reference has been
2996 successfully found and removed. */
2997
2998 static bool
2999 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3000 {
3001 struct ipa_ref *to_del;
3002 struct cgraph_edge *origin;
3003
3004 origin = rdesc->cs;
3005 if (!origin)
3006 return false;
3007 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3008 origin->lto_stmt_uid);
3009 if (!to_del)
3010 return false;
3011
3012 to_del->remove_reference ();
3013 if (dump_file)
3014 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3015 xstrdup (origin->caller->name ()),
3016 origin->caller->order, xstrdup (symbol->name ()));
3017 return true;
3018 }
3019
3020 /* If JFUNC has a reference description with refcount different from
3021 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3022 NULL. JFUNC must be a constant jump function. */
3023
3024 static struct ipa_cst_ref_desc *
3025 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3026 {
3027 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3028 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3029 return rdesc;
3030 else
3031 return NULL;
3032 }
3033
3034 /* If the value of constant jump function JFUNC is an address of a function
3035 declaration, return the associated call graph node. Otherwise return
3036 NULL. */
3037
3038 static cgraph_node *
3039 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3040 {
3041 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3042 tree cst = ipa_get_jf_constant (jfunc);
3043 if (TREE_CODE (cst) != ADDR_EXPR
3044 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3045 return NULL;
3046
3047 return cgraph_node::get (TREE_OPERAND (cst, 0));
3048 }
3049
3050
3051 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3052 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3053 the edge specified in the rdesc. Return false if either the symbol or the
3054 reference could not be found, otherwise return true. */
3055
3056 static bool
3057 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3058 {
3059 struct ipa_cst_ref_desc *rdesc;
3060 if (jfunc->type == IPA_JF_CONST
3061 && (rdesc = jfunc_rdesc_usable (jfunc))
3062 && --rdesc->refcount == 0)
3063 {
3064 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3065 if (!symbol)
3066 return false;
3067
3068 return remove_described_reference (symbol, rdesc);
3069 }
3070 return true;
3071 }
3072
3073 /* Try to find a destination for indirect edge IE that corresponds to a simple
3074 call or a call of a member function pointer and where the destination is a
3075 pointer formal parameter described by jump function JFUNC. If it can be
3076 determined, return the newly direct edge, otherwise return NULL.
3077 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3078
static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  /* Cache this flag before the edge is possibly changed below.  */
  bool agg_contents = ie->indirect_info->agg_contents;

  /* The target is either stored in an aggregate passed in the parameter or
     is the value of the parameter itself.  */
  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      /* If the call was redirected to a different edge (CS != IE), the
	 resulting callee must still agree with what the constant jump
	 function describes, if it describes anything.  */
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The jump function no longer describes a tracked use of the function
	 address, so drop the associated reference.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
3112
3113 /* Return the target to be used in cases of impossible devirtualization. IE
3114 and target (the latter can be NULL) are dumped when dumping is enabled. */
3115
3116 tree
3117 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3118 {
3119 if (dump_file)
3120 {
3121 if (target)
3122 fprintf (dump_file,
3123 "Type inconsistent devirtualization: %s/%i->%s\n",
3124 ie->caller->name (), ie->caller->order,
3125 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3126 else
3127 fprintf (dump_file,
3128 "No devirtualization target in %s/%i\n",
3129 ie->caller->name (), ie->caller->order);
3130 }
3131 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3132 cgraph_node::get_create (new_target);
3133 return new_target;
3134 }
3135
3136 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3137 call based on a formal parameter which is described by jump function JFUNC
3138 and if it can be determined, make it direct and return the direct edge.
3139 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3140 are relative to. */
3141
static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_node_params *new_root_info,
				   struct ipa_polymorphic_call_context *ctx_ptr)
{
  tree binfo, target = NULL;
  /* Set when the best we can do is a speculative (guarded) direct call.  */
  bool speculative = false;
  /* Set when combining call contexts produced new information, making it
     worthwhile to re-query the set of possible call targets.  */
  bool updated = false;

  if (!flag_devirtualize)
    return NULL;

  /* If this is call of a function parameter, restrict its type
     based on knowledge of the context.  */
  if (ctx_ptr && !ie->indirect_info->by_ref)
    {
      struct ipa_polymorphic_call_context ctx = *ctx_ptr;

      ctx.offset_by (ie->indirect_info->offset);

      /* The dynamic type may have changed between the point the context
	 describes and the call itself; account for that.  */
      if (ie->indirect_info->vptr_changed)
	ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
					  ie->indirect_info->otr_type);

      updated = ie->indirect_info->context.combine_with
		 (ctx, ie->indirect_info->otr_type);
    }

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->by_ref
      && (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      /* See whether the aggregate jump function carries a known vtable
	 pointer for the object at the call's offset.  */
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset);
	  if (t)
	    {
	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		}
	      else
		{
		  target = t;
		  /* If the vptr could have changed, the lookup above is
		     only a guess.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  binfo = ipa_value_from_jfunc (new_root_info, jfunc);

  /* A non-BINFO value is used here to construct a polymorphic call context
     and is folded into what we already know about the call.  */
  if (binfo && TREE_CODE (binfo) != TREE_BINFO)
    {
      struct ipa_polymorphic_call_context ctx (binfo,
					       ie->indirect_info->otr_type,
					       ie->indirect_info->offset);
      updated |= ie->indirect_info->context.combine_with
		  (ctx, ie->indirect_info->otr_type);
    }

  if (updated)
    {
      /* The context became more precise; recompute possible targets.  */
      ipa_polymorphic_call_context context (ie);
      vec <cgraph_node *>targets;
      bool final;

      targets = possible_polymorphic_call_targets
		 (ie->indirect_info->otr_type,
		  ie->indirect_info->otr_token,
		  context, &final);
      if (final && targets.length () <= 1)
	{
	  /* A complete list with one target devirtualizes directly; an
	     empty complete list means the call can never be executed.  */
	  if (targets.length () == 1)
	    target = targets[0]->decl;
	  else
	    target = ipa_impossible_devirt_target (ie, NULL_TREE);
	}
      else if (!target && flag_devirtualize_speculatively
	       && !ie->speculative && ie->maybe_hot_p ())
	{
	  cgraph_node *n = try_speculative_devirtualization (ie->indirect_info->otr_type,
							     ie->indirect_info->otr_token,
							     ie->indirect_info->context);
	  if (n)
	    {
	      target = n->decl;
	      speculative = true;
	    }
	}
    }

  if (binfo && TREE_CODE (binfo) == TREE_BINFO)
    {
      binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
				   ie->indirect_info->otr_type);
      if (binfo)
	{
	  tree t = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
						     binfo);
	  if (t)
	    {
	      /* A BINFO-based answer overrides a speculative one and must
		 agree with a previously found sure target.  */
	      gcc_assert (!target || speculative || target == t);
	      target = t;
	      speculative = false;
	    }
	}
    }

  if (target)
    {
      /* A speculation contradicted by the type analysis is simply dropped;
	 a sure target is replaced by __builtin_unreachable.  */
      if (!possible_polymorphic_call_target_p (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3276
3277 /* Update the param called notes associated with NODE when CS is being inlined,
3278 assuming NODE is (potentially indirectly) inlined into CS->callee.
3279 Moreover, if the callee is discovered to be constant, create a new cgraph
3280 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3281 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3282
static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  /* The jump functions of CS describe parameters of the new root of the
     inline tree, i.e. the node CS's caller is ultimately inlined into.  */
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* IE may be redirected or removed below, so fetch its successor
	 first.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!flag_indirect_inlining)
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context *ctx;
	  ctx = ipa_get_ith_polymorhic_call_context (top, param_index);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
							       new_root_info,
							       ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* NOTE(review): TOP is re-fetched after edges change, presumably
	     because redirecting edges can reallocate the edge args vector —
	     keep these refreshes when modifying this loop.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	}
      else if (jfunc->type == IPA_JF_PASS_THROUGH
	       && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  /* The parameter is passed through unchanged; keep tracking it
	     under the caller's formal id unless the relevant aggregate
	     contents or type are not preserved.  */
	  if ((ici->agg_contents
	       && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  /* Like pass-through, but the tracked value lives at an offset
	     inside the ancestor parameter.  */
	  if ((ici->agg_contents
	       && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3386
3387 /* Recursively traverse subtree of NODE (including node) made of inlined
3388 cgraph_edges when CS has been inlined and invoke
3389 update_indirect_edges_after_inlining on all nodes and
3390 update_jump_functions_after_inlining on all non-inlined edges that lead out
3391 of this subtree. Newly discovered indirect edges will be added to
3392 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3393 created. */
3394
3395 static bool
3396 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3397 struct cgraph_node *node,
3398 vec<cgraph_edge *> *new_edges)
3399 {
3400 struct cgraph_edge *e;
3401 bool res;
3402
3403 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3404
3405 for (e = node->callees; e; e = e->next_callee)
3406 if (!e->inline_failed)
3407 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3408 else
3409 update_jump_functions_after_inlining (cs, e);
3410 for (e = node->indirect_calls; e; e = e->next_callee)
3411 update_jump_functions_after_inlining (cs, e);
3412
3413 return res;
3414 }
3415
3416 /* Combine two controlled uses counts as done during inlining. */
3417
3418 static int
3419 combine_controlled_uses_counters (int c, int d)
3420 {
3421 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3422 return IPA_UNDESCRIBED_USE;
3423 else
3424 return c + d - 1;
3425 }
3426
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */
3429
static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Arguments beyond the callee's parameter count are handled by the second
     loop below.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  /* The callee's parameter I is the caller's parameter SRC_IDX;
	     fold the callee's use count into the caller's.  */
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All uses are accounted for; if the known constant is a
		 function address and the IPA-CP clone still holds a
		 cloning-created reference to it, remove that reference.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_vals[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup (new_root->name ()),
			     new_root->order,
			     xstrdup (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of inlined IPA-CP clones towards the
		     edge the rdesc describes and drop any cloning-created
		     references to N they still hold.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup (clone->name ()),
				     clone->order,
				     xstrdup (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments without a matching callee parameter cannot be tracked; mark
     everything they describe as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
3544
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */
3550
3551 bool
3552 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3553 vec<cgraph_edge *> *new_edges)
3554 {
3555 bool changed;
3556 /* Do nothing if the preparation phase has not been carried out yet
3557 (i.e. during early inlining). */
3558 if (!ipa_node_params_vector.exists ())
3559 return false;
3560 gcc_assert (ipa_edge_args_vector);
3561
3562 propagate_controlled_uses (cs);
3563 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3564
3565 return changed;
3566 }
3567
3568 /* Frees all dynamically allocated structures that the argument info points
3569 to. */
3570
3571 void
3572 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3573 {
3574 vec_free (args->jump_functions);
3575 memset (args, 0, sizeof (*args));
3576 }
3577
3578 /* Free all ipa_edge structures. */
3579
3580 void
3581 ipa_free_all_edge_args (void)
3582 {
3583 int i;
3584 struct ipa_edge_args *args;
3585
3586 if (!ipa_edge_args_vector)
3587 return;
3588
3589 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3590 ipa_free_edge_args_substructures (args);
3591
3592 vec_free (ipa_edge_args_vector);
3593 }
3594
3595 /* Frees all dynamically allocated structures that the param info points
3596 to. */
3597
3598 void
3599 ipa_free_node_params_substructures (struct ipa_node_params *info)
3600 {
3601 info->descriptors.release ();
3602 free (info->lattices);
3603 /* Lattice values and their sources are deallocated with their alocation
3604 pool. */
3605 info->known_vals.release ();
3606 memset (info, 0, sizeof (*info));
3607 }
3608
3609 /* Free all ipa_node_params structures. */
3610
3611 void
3612 ipa_free_all_node_params (void)
3613 {
3614 int i;
3615 struct ipa_node_params *info;
3616
3617 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3618 ipa_free_node_params_substructures (info);
3619
3620 ipa_node_params_vector.release ();
3621 }
3622
3623 /* Set the aggregate replacements of NODE to be AGGVALS. */
3624
3625 void
3626 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3627 struct ipa_agg_replacement_value *aggvals)
3628 {
3629 if (vec_safe_length (ipa_node_agg_replacements)
3630 <= (unsigned) symtab->cgraph_max_uid)
3631 vec_safe_grow_cleared (ipa_node_agg_replacements,
3632 symtab->cgraph_max_uid + 1);
3633
3634 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3635 }
3636
3637 /* Hook that is called by cgraph.c when an edge is removed. */
3638
3639 static void
3640 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3641 {
3642 struct ipa_edge_args *args;
3643
3644 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3645 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3646 return;
3647
3648 args = IPA_EDGE_REF (cs);
3649 if (args->jump_functions)
3650 {
3651 struct ipa_jump_func *jf;
3652 int i;
3653 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3654 {
3655 struct ipa_cst_ref_desc *rdesc;
3656 try_decrement_rdesc_refcount (jf);
3657 if (jf->type == IPA_JF_CONST
3658 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3659 && rdesc->cs == cs)
3660 rdesc->cs = NULL;
3661 }
3662 }
3663
3664 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3665 }
3666
3667 /* Hook that is called by cgraph.c when a node is removed. */
3668
3669 static void
3670 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3671 {
3672 /* During IPA-CP updating we can be called on not-yet analyze clones. */
3673 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3674 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3675 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3676 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3677 }
3678
3679 /* Hook that is called by cgraph.c when an edge is duplicated. */
3680
static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The vec_safe_copy above copied the jump function structs shallowly,
	 so dst_jf->agg.items still aliases the source vector; give the
	 destination its own copy.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* The duplicate starts from the same caller, so the caller
		 gains another use of the described symbol: clone the
		 reference and give the new edge an independent rdesc.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* SRC is the edge its own rdesc describes; chain the new
		 rdesc into the duplicate list so the lookup in the final
		 branch below can find it.  */
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  /* Match duplicates by the root of their inline tree.  */
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* Duplicating the edge within the same caller adds one tracked
	     use of the passed-through parameter.  */
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
3782
3783 /* Hook that is called by cgraph.c when a node is duplicated. */
3784
3785 static void
3786 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3787 ATTRIBUTE_UNUSED void *data)
3788 {
3789 struct ipa_node_params *old_info, *new_info;
3790 struct ipa_agg_replacement_value *old_av, *new_av;
3791
3792 ipa_check_create_node_params ();
3793 old_info = IPA_NODE_REF (src);
3794 new_info = IPA_NODE_REF (dst);
3795
3796 new_info->descriptors = old_info->descriptors.copy ();
3797 new_info->lattices = NULL;
3798 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3799
3800 new_info->analysis_done = old_info->analysis_done;
3801 new_info->node_enqueued = old_info->node_enqueued;
3802
3803 old_av = ipa_get_agg_replacements_for_node (src);
3804 if (!old_av)
3805 return;
3806
3807 new_av = NULL;
3808 while (old_av)
3809 {
3810 struct ipa_agg_replacement_value *v;
3811
3812 v = ggc_alloc<ipa_agg_replacement_value> ();
3813 memcpy (v, old_av, sizeof (*v));
3814 v->next = new_av;
3815 new_av = v;
3816 old_av = old_av->next;
3817 }
3818 ipa_set_node_agg_value_chain (dst, new_av);
3819 }
3820
3821
3822 /* Analyze newly added function into callgraph. */
3823
3824 static void
3825 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3826 {
3827 if (node->has_gimple_body_p ())
3828 ipa_analyze_node (node);
3829 }
3830
3831 /* Register our cgraph hooks if they are not already there. */
3832
3833 void
3834 ipa_register_cgraph_hooks (void)
3835 {
3836 if (!edge_removal_hook_holder)
3837 edge_removal_hook_holder =
3838 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3839 if (!node_removal_hook_holder)
3840 node_removal_hook_holder =
3841 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3842 if (!edge_duplication_hook_holder)
3843 edge_duplication_hook_holder =
3844 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3845 if (!node_duplication_hook_holder)
3846 node_duplication_hook_holder =
3847 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3848 function_insertion_hook_holder =
3849 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3850 }
3851
3852 /* Unregister our cgraph hooks if they are not already there. */
3853
3854 static void
3855 ipa_unregister_cgraph_hooks (void)
3856 {
3857 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3858 edge_removal_hook_holder = NULL;
3859 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3860 node_removal_hook_holder = NULL;
3861 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3862 edge_duplication_hook_holder = NULL;
3863 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3864 node_duplication_hook_holder = NULL;
3865 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3866 function_insertion_hook_holder = NULL;
3867 }
3868
3869 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3870 longer needed after ipa-cp. */
3871
3872 void
3873 ipa_free_all_structures_after_ipa_cp (void)
3874 {
3875 if (!optimize)
3876 {
3877 ipa_free_all_edge_args ();
3878 ipa_free_all_node_params ();
3879 free_alloc_pool (ipcp_sources_pool);
3880 free_alloc_pool (ipcp_values_pool);
3881 free_alloc_pool (ipcp_agg_lattice_pool);
3882 ipa_unregister_cgraph_hooks ();
3883 if (ipa_refdesc_pool)
3884 free_alloc_pool (ipa_refdesc_pool);
3885 }
3886 }
3887
3888 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3889 longer needed after indirect inlining. */
3890
3891 void
3892 ipa_free_all_structures_after_iinln (void)
3893 {
3894 ipa_free_all_edge_args ();
3895 ipa_free_all_node_params ();
3896 ipa_unregister_cgraph_hooks ();
3897 if (ipcp_sources_pool)
3898 free_alloc_pool (ipcp_sources_pool);
3899 if (ipcp_values_pool)
3900 free_alloc_pool (ipcp_values_pool);
3901 if (ipcp_agg_lattice_pool)
3902 free_alloc_pool (ipcp_agg_lattice_pool);
3903 if (ipa_refdesc_pool)
3904 free_alloc_pool (ipa_refdesc_pool);
3905 }
3906
/* Print ipa_tree_map data structures (parameter descriptors) of the
   function NODE to F.  */
3909
3910 void
3911 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3912 {
3913 int i, count;
3914 struct ipa_node_params *info;
3915
3916 if (!node->definition)
3917 return;
3918 info = IPA_NODE_REF (node);
3919 fprintf (f, " function %s/%i parameter descriptors:\n",
3920 node->name (), node->order);
3921 count = ipa_get_param_count (info);
3922 for (i = 0; i < count; i++)
3923 {
3924 int c;
3925
3926 fprintf (f, " ");
3927 ipa_dump_param (f, info, i);
3928 if (ipa_is_param_used (info, i))
3929 fprintf (f, " used");
3930 c = ipa_get_controlled_uses (info, i);
3931 if (c == IPA_UNDESCRIBED_USE)
3932 fprintf (f, " undescribed_use");
3933 else
3934 fprintf (f, " controlled_uses=%i", c);
3935 fprintf (f, "\n");
3936 }
3937 }
3938
3939 /* Print ipa_tree_map data structures of all functions in the
3940 callgraph to F. */
3941
3942 void
3943 ipa_print_all_params (FILE * f)
3944 {
3945 struct cgraph_node *node;
3946
3947 fprintf (f, "\nFunction parameters:\n");
3948 FOR_EACH_FUNCTION (node)
3949 ipa_print_node_params (f, node);
3950 }
3951
3952 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3953
3954 vec<tree>
3955 ipa_get_vector_of_formal_parms (tree fndecl)
3956 {
3957 vec<tree> args;
3958 int count;
3959 tree parm;
3960
3961 gcc_assert (!flag_wpa);
3962 count = count_formal_params (fndecl);
3963 args.create (count);
3964 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3965 args.quick_push (parm);
3966
3967 return args;
3968 }
3969
3970 /* Return a heap allocated vector containing types of formal parameters of
3971 function type FNTYPE. */
3972
3973 vec<tree>
3974 ipa_get_vector_of_formal_parm_types (tree fntype)
3975 {
3976 vec<tree> types;
3977 int count = 0;
3978 tree t;
3979
3980 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3981 count++;
3982
3983 types.create (count);
3984 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3985 types.quick_push (TREE_VALUE (t));
3986
3987 return types;
3988 }
3989
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      /* A trailing void_type_node marks a non-variadic argument list, in
	 which case there is one more type entry than there are PARM_DECLs.  */
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  /* Walk the adjustments, rebuilding both the DECL_ARGUMENTS chain (via
     LINK) and the TYPE_ARG_TYPES list (NEW_ARG_TYPES is accumulated in
     reverse order and fixed up below).  */
  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  /* Keep the original PARM_DECL and its type.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  /* IPA_PARM_OP_NEW or a replacement: synthesize a fresh
	     PARM_DECL.  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  /* Make sure a register-type parameter is at least
		     naturally aligned for its mode.  */
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  /* Terminate the rebuilt argument chain.  */
  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  /* Restore the trailing void that marks a non-variadic list.
	     Note that after nreverse, NEW_ARG_TYPES points at the last
	     element of the reversed list.  */
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      /* The method lost its THIS pointer, so it can no longer be
	 dispatched virtually.  */
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
4140
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  /* Drop references recorded for the old statement; they are re-recorded
     for the replacement (and any inserted statements) at the end.  */
  current_node->remove_stmt_references (stmt);

  /* Remember the statement just before STMT so the loop at the bottom can
     walk back over all the statements inserted here.  */
  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  /* Pass the original argument through unchanged.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
               value.  (adj->by_ref is false and the type of the original
               actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
               The part can be passed either by value or by reference, this is
               determined by value of adj->by_ref.  Moreover, the code below
               handles both situations when the original aggregate is passed by
               value (its type is not a pointer) and when it is passed by
               reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* The aggregate is passed by reference: BASE already is the
	       pointer, only an offset is needed.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  /* Fold the MEM_REF's own offset into OFF and strip the
		     MEM_REF wrapper.  */
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      /* Account for the constant offset when computing the known
		 alignment of the access; lower the access type's alignment
		 if necessary.  */
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr), NULL);
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      /* Pass the address of the selected piece of the aggregate.  */
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  /* Emit a debug bind so that debug info for the transformed or
	     removed argument is still available in the callee.  */
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* Look for an existing DEBUG_EXPR_DECL for ORIGIN; the vector
	     stores (origin, debug decl) pairs, hence the stride of 2.  */
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      /* Transfer virtual operands from the old call to the new one.  */
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  /* Re-record references for the new call and for every statement that was
     inserted above, back to PREV_GSI.  */
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
4388
4389 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4390 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4391 specifies whether the function should care about type incompatibility the
4392 current and new expressions. If it is false, the function will leave
4393 incompatibility issues to the caller. Return true iff the expression
4394 was modified. */
4395
4396 bool
4397 ipa_modify_expr (tree *expr, bool convert,
4398 ipa_parm_adjustment_vec adjustments)
4399 {
4400 struct ipa_parm_adjustment *cand
4401 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4402 if (!cand)
4403 return false;
4404
4405 tree src;
4406 if (cand->by_ref)
4407 src = build_simple_mem_ref (cand->new_decl);
4408 else
4409 src = cand->new_decl;
4410
4411 if (dump_file && (dump_flags & TDF_DETAILS))
4412 {
4413 fprintf (dump_file, "About to replace expr ");
4414 print_generic_expr (dump_file, *expr, 0);
4415 fprintf (dump_file, " with ");
4416 print_generic_expr (dump_file, src, 0);
4417 fprintf (dump_file, "\n");
4418 }
4419
4420 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4421 {
4422 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4423 *expr = vce;
4424 }
4425 else
4426 *expr = src;
4427 return true;
4428 }
4429
4430 /* If T is an SSA_NAME, return NULL if it is not a default def or
4431 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4432 the base variable is always returned, regardless if it is a default
4433 def. Return T if it is not an SSA_NAME. */
4434
4435 static tree
4436 get_ssa_base_param (tree t, bool ignore_default_def)
4437 {
4438 if (TREE_CODE (t) == SSA_NAME)
4439 {
4440 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4441 return SSA_NAME_VAR (t);
4442 else
4443 return NULL_TREE;
4444 }
4445 return t;
4446 }
4447
4448 /* Given an expression, return an adjustment entry specifying the
4449 transformation to be done on EXPR. If no suitable adjustment entry
4450 was found, returns NULL.
4451
4452 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4453 default def, otherwise bail on them.
4454
4455 If CONVERT is non-NULL, this function will set *CONVERT if the
4456 expression provided is a component reference. ADJUSTMENTS is the
4457 adjustments vector. */
4458
4459 ipa_parm_adjustment *
4460 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4461 ipa_parm_adjustment_vec adjustments,
4462 bool ignore_default_def)
4463 {
4464 if (TREE_CODE (**expr) == BIT_FIELD_REF
4465 || TREE_CODE (**expr) == IMAGPART_EXPR
4466 || TREE_CODE (**expr) == REALPART_EXPR)
4467 {
4468 *expr = &TREE_OPERAND (**expr, 0);
4469 if (convert)
4470 *convert = true;
4471 }
4472
4473 HOST_WIDE_INT offset, size, max_size;
4474 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4475 if (!base || size == -1 || max_size == -1)
4476 return NULL;
4477
4478 if (TREE_CODE (base) == MEM_REF)
4479 {
4480 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4481 base = TREE_OPERAND (base, 0);
4482 }
4483
4484 base = get_ssa_base_param (base, ignore_default_def);
4485 if (!base || TREE_CODE (base) != PARM_DECL)
4486 return NULL;
4487
4488 struct ipa_parm_adjustment *cand = NULL;
4489 unsigned int len = adjustments.length ();
4490 for (unsigned i = 0; i < len; i++)
4491 {
4492 struct ipa_parm_adjustment *adj = &adjustments[i];
4493
4494 if (adj->base == base
4495 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4496 {
4497 cand = adj;
4498 break;
4499 }
4500 }
4501
4502 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4503 return NULL;
4504 return cand;
4505 }
4506
4507 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4508
4509 static bool
4510 index_in_adjustments_multiple_times_p (int base_index,
4511 ipa_parm_adjustment_vec adjustments)
4512 {
4513 int i, len = adjustments.length ();
4514 bool one = false;
4515
4516 for (i = 0; i < len; i++)
4517 {
4518 struct ipa_parm_adjustment *adj;
4519 adj = &adjustments[i];
4520
4521 if (adj->base_index == base_index)
4522 {
4523 if (one)
4524 return true;
4525 else
4526 one = true;
4527 }
4528 }
4529 return false;
4530 }
4531
4532
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP holds the inner adjustments that survived (everything except
     removals); OUTER's base_index values index into this compacted
     vector.  */
  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments are not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  /* Propagate the outer removal, but only emit it once per
	     original parameter index.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments are not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	/* Both steps reduce the parameter; the offsets compose
	   additively.  */
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  /* Finally re-emit removals recorded in INNER.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
4613
4614 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4615 friendly way, assuming they are meant to be applied to FNDECL. */
4616
4617 void
4618 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4619 tree fndecl)
4620 {
4621 int i, len = adjustments.length ();
4622 bool first = true;
4623 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4624
4625 fprintf (file, "IPA param adjustments: ");
4626 for (i = 0; i < len; i++)
4627 {
4628 struct ipa_parm_adjustment *adj;
4629 adj = &adjustments[i];
4630
4631 if (!first)
4632 fprintf (file, " ");
4633 else
4634 first = false;
4635
4636 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4637 print_generic_expr (file, parms[adj->base_index], 0);
4638 if (adj->base)
4639 {
4640 fprintf (file, ", base: ");
4641 print_generic_expr (file, adj->base, 0);
4642 }
4643 if (adj->new_decl)
4644 {
4645 fprintf (file, ", new_decl: ");
4646 print_generic_expr (file, adj->new_decl, 0);
4647 }
4648 if (adj->new_ssa_base)
4649 {
4650 fprintf (file, ", new_ssa_base: ");
4651 print_generic_expr (file, adj->new_ssa_base, 0);
4652 }
4653
4654 if (adj->op == IPA_PARM_OP_COPY)
4655 fprintf (file, ", copy_param");
4656 else if (adj->op == IPA_PARM_OP_REMOVE)
4657 fprintf (file, ", remove_param");
4658 else
4659 fprintf (file, ", offset %li", (long) adj->offset);
4660 if (adj->by_ref)
4661 fprintf (file, ", by_ref");
4662 print_node_brief (file, ", type: ", adj->type, 0);
4663 fprintf (file, "\n");
4664 }
4665 parms.release ();
4666 }
4667
4668 /* Dump the AV linked list. */
4669
4670 void
4671 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4672 {
4673 bool comma = false;
4674 fprintf (f, " Aggregate replacements:");
4675 for (; av; av = av->next)
4676 {
4677 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4678 av->index, av->offset);
4679 print_generic_expr (f, av->value, 0);
4680 comma = true;
4681 }
4682 fprintf (f, "\n");
4683 }
4684
/* Stream out jump function JUMP_FUNC to OB.  The layout written here must be
   kept in sync with ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus two preservation bits.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  /* Arithmetic pass-through: the second operand and formal id.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate jump function part: item count, the by_ref flag (only when
     there are items), then the items themselves.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
4752
/* Read in jump function JUMP_FUNC from IB.  Must mirror the layout produced
   by ipa_write_jump_function.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree base_type = stream_read_tree (ib, data_in);
	tree component_type = stream_read_tree (ib, data_in);

	ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
	break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus two preservation bits.  */
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  bool type_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
					  type_preserved);
	}
      else
	{
	  /* Arithmetic pass-through: the second operand and formal id.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree type = stream_read_tree (ib, data_in);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool type_preserved = bp_unpack_value (&bp, 1);

	ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
			     type_preserved);
	break;
      }
    }

  /* Aggregate jump function part: item count, the by_ref flag (only when
     there are items), then the items themselves.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}
4832
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  The layout must be kept in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  /* Pack the five boolean flags into a single bitpack word.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful (and streamed) for aggregate contents
     or polymorphic calls.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      /* Polymorphic calls additionally carry the OBJ_TYPE_REF token, type
	 and polymorphic call context.  */
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4863
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  Must mirror the layout produced
   by ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  /* Unpack the five flags in the same order they were packed.  */
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  /* The offset was only streamed for aggregate contents or polymorphic
     calls.  */
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
4893
/* Stream out NODE info to OB.  The layout must be kept in sync with
   ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Analysis must have been done unless the node has no parameters;
     enqueued nodes and IPA-CP clones are never streamed.  */
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Encode the argument count doubled, with the low bit flagging
	 whether a polymorphic call context follows each jump function.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Same doubled-count encoding as for direct callees above.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      /* Indirect edges additionally carry the indirect call info.  */
      ipa_write_indirect_edge_info (ob, e);
    }
}
4953
/* Stream in NODE info from IB.  Must mirror the layout produced by
   ipa_write_node_info.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  /* The writer asserted analysis_done for any node with parameters.  */
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      /* The low bit says whether a polymorphic call context follows each
	 jump function; the remaining bits are the argument count.  */
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      /* Same doubled-count encoding as for direct callees above.  */
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	    }
	}
      /* Indirect edges additionally carry the indirect call info.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
5022
5023 /* Write jump functions for nodes in SET. */
5024
5025 void
5026 ipa_prop_write_jump_functions (void)
5027 {
5028 struct cgraph_node *node;
5029 struct output_block *ob;
5030 unsigned int count = 0;
5031 lto_symtab_encoder_iterator lsei;
5032 lto_symtab_encoder_t encoder;
5033
5034
5035 if (!ipa_node_params_vector.exists ())
5036 return;
5037
5038 ob = create_output_block (LTO_section_jump_functions);
5039 encoder = ob->decl_state->symtab_node_encoder;
5040 ob->symbol = NULL;
5041 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5042 lsei_next_function_in_partition (&lsei))
5043 {
5044 node = lsei_cgraph_node (lsei);
5045 if (node->has_gimple_body_p ()
5046 && IPA_NODE_REF (node) != NULL)
5047 count++;
5048 }
5049
5050 streamer_write_uhwi (ob, count);
5051
5052 /* Process all of the functions. */
5053 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5054 lsei_next_function_in_partition (&lsei))
5055 {
5056 node = lsei_cgraph_node (lsei);
5057 if (node->has_gimple_body_p ()
5058 && IPA_NODE_REF (node) != NULL)
5059 ipa_write_node_info (ob, node);
5060 }
5061 streamer_write_char_stream (ob->main_stream, 0);
5062 produce_asm (ob, NULL);
5063 destroy_output_block (ob);
5064 }
5065
5066 /* Read section in file FILE_DATA of length LEN with data DATA. */
5067
5068 static void
5069 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5070 size_t len)
5071 {
5072 const struct lto_function_header *header =
5073 (const struct lto_function_header *) data;
5074 const int cfg_offset = sizeof (struct lto_function_header);
5075 const int main_offset = cfg_offset + header->cfg_size;
5076 const int string_offset = main_offset + header->main_size;
5077 struct data_in *data_in;
5078 unsigned int i;
5079 unsigned int count;
5080
5081 lto_input_block ib_main ((const char *) data + main_offset,
5082 header->main_size);
5083
5084 data_in =
5085 lto_data_in_create (file_data, (const char *) data + string_offset,
5086 header->string_size, vNULL);
5087 count = streamer_read_uhwi (&ib_main);
5088
5089 for (i = 0; i < count; i++)
5090 {
5091 unsigned int index;
5092 struct cgraph_node *node;
5093 lto_symtab_encoder_t encoder;
5094
5095 index = streamer_read_uhwi (&ib_main);
5096 encoder = file_data->symtab_node_encoder;
5097 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5098 index));
5099 gcc_assert (node->definition);
5100 ipa_read_node_info (&ib_main, node, data_in);
5101 }
5102 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5103 len);
5104 lto_data_in_delete (data_in);
5105 }
5106
5107 /* Read ipcp jump functions. */
5108
5109 void
5110 ipa_prop_read_jump_functions (void)
5111 {
5112 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5113 struct lto_file_decl_data *file_data;
5114 unsigned int j = 0;
5115
5116 ipa_check_create_node_params ();
5117 ipa_check_create_edge_args ();
5118 ipa_register_cgraph_hooks ();
5119
5120 while ((file_data = file_data_vec[j++]))
5121 {
5122 size_t len;
5123 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5124
5125 if (data)
5126 ipa_prop_read_section (file_data, data, len);
5127 }
5128 }
5129
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  /* (Re)create the per-node and per-edge IPA summary vectors if they do
     not exist yet, so that later passes can index them safely.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
5140
5141 void
5142 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
5143 {
5144 int node_ref;
5145 unsigned int count = 0;
5146 lto_symtab_encoder_t encoder;
5147 struct ipa_agg_replacement_value *aggvals, *av;
5148
5149 aggvals = ipa_get_agg_replacements_for_node (node);
5150 encoder = ob->decl_state->symtab_node_encoder;
5151 node_ref = lto_symtab_encoder_encode (encoder, node);
5152 streamer_write_uhwi (ob, node_ref);
5153
5154 for (av = aggvals; av; av = av->next)
5155 count++;
5156 streamer_write_uhwi (ob, count);
5157
5158 for (av = aggvals; av; av = av->next)
5159 {
5160 struct bitpack_d bp;
5161
5162 streamer_write_uhwi (ob, av->offset);
5163 streamer_write_uhwi (ob, av->index);
5164 stream_write_tree (ob, av->value, true);
5165
5166 bp = bitpack_create (ob->main_stream);
5167 bp_pack_value (&bp, av->by_ref, 1);
5168 streamer_write_bitpack (&bp);
5169 }
5170 }
5171
5172 /* Stream in the aggregate value replacement chain for NODE from IB. */
5173
5174 static void
5175 read_agg_replacement_chain (struct lto_input_block *ib,
5176 struct cgraph_node *node,
5177 struct data_in *data_in)
5178 {
5179 struct ipa_agg_replacement_value *aggvals = NULL;
5180 unsigned int count, i;
5181
5182 count = streamer_read_uhwi (ib);
5183 for (i = 0; i <count; i++)
5184 {
5185 struct ipa_agg_replacement_value *av;
5186 struct bitpack_d bp;
5187
5188 av = ggc_alloc<ipa_agg_replacement_value> ();
5189 av->offset = streamer_read_uhwi (ib);
5190 av->index = streamer_read_uhwi (ib);
5191 av->value = stream_read_tree (ib, data_in);
5192 bp = streamer_read_bitpack (ib);
5193 av->by_ref = bp_unpack_value (&bp, 1);
5194 av->next = aggvals;
5195 aggvals = av;
5196 }
5197 ipa_set_node_agg_value_chain (node, aggvals);
5198 }
5199
5200 /* Write all aggregate replacement for nodes in set. */
5201
5202 void
5203 ipa_prop_write_all_agg_replacement (void)
5204 {
5205 struct cgraph_node *node;
5206 struct output_block *ob;
5207 unsigned int count = 0;
5208 lto_symtab_encoder_iterator lsei;
5209 lto_symtab_encoder_t encoder;
5210
5211 if (!ipa_node_agg_replacements)
5212 return;
5213
5214 ob = create_output_block (LTO_section_ipcp_transform);
5215 encoder = ob->decl_state->symtab_node_encoder;
5216 ob->symbol = NULL;
5217 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5218 lsei_next_function_in_partition (&lsei))
5219 {
5220 node = lsei_cgraph_node (lsei);
5221 if (node->has_gimple_body_p ()
5222 && ipa_get_agg_replacements_for_node (node) != NULL)
5223 count++;
5224 }
5225
5226 streamer_write_uhwi (ob, count);
5227
5228 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5229 lsei_next_function_in_partition (&lsei))
5230 {
5231 node = lsei_cgraph_node (lsei);
5232 if (node->has_gimple_body_p ()
5233 && ipa_get_agg_replacements_for_node (node) != NULL)
5234 write_agg_replacement_chain (ob, node);
5235 }
5236 streamer_write_char_stream (ob->main_stream, 0);
5237 produce_asm (ob, NULL);
5238 destroy_output_block (ob);
5239 }
5240
5241 /* Read replacements section in file FILE_DATA of length LEN with data
5242 DATA. */
5243
5244 static void
5245 read_replacements_section (struct lto_file_decl_data *file_data,
5246 const char *data,
5247 size_t len)
5248 {
5249 const struct lto_function_header *header =
5250 (const struct lto_function_header *) data;
5251 const int cfg_offset = sizeof (struct lto_function_header);
5252 const int main_offset = cfg_offset + header->cfg_size;
5253 const int string_offset = main_offset + header->main_size;
5254 struct data_in *data_in;
5255 unsigned int i;
5256 unsigned int count;
5257
5258 lto_input_block ib_main ((const char *) data + main_offset,
5259 header->main_size);
5260
5261 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5262 header->string_size, vNULL);
5263 count = streamer_read_uhwi (&ib_main);
5264
5265 for (i = 0; i < count; i++)
5266 {
5267 unsigned int index;
5268 struct cgraph_node *node;
5269 lto_symtab_encoder_t encoder;
5270
5271 index = streamer_read_uhwi (&ib_main);
5272 encoder = file_data->symtab_node_encoder;
5273 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5274 index));
5275 gcc_assert (node->definition);
5276 read_agg_replacement_chain (&ib_main, node, data_in);
5277 }
5278 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5279 len);
5280 lto_data_in_delete (data_in);
5281 }
5282
5283 /* Read IPA-CP aggregate replacements. */
5284
5285 void
5286 ipa_prop_read_all_agg_replacement (void)
5287 {
5288 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5289 struct lto_file_decl_data *file_data;
5290 unsigned int j = 0;
5291
5292 while ((file_data = file_data_vec[j++]))
5293 {
5294 size_t len;
5295 const char *data = lto_get_section_data (file_data,
5296 LTO_section_ipcp_transform,
5297 NULL, &len);
5298 if (data)
5299 read_replacements_section (file_data, data, len);
5300 }
5301 }
5302
5303 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5304 NODE. */
5305
5306 static void
5307 adjust_agg_replacement_values (struct cgraph_node *node,
5308 struct ipa_agg_replacement_value *aggval)
5309 {
5310 struct ipa_agg_replacement_value *v;
5311 int i, c = 0, d = 0, *adj;
5312
5313 if (!node->clone.combined_args_to_skip)
5314 return;
5315
5316 for (v = aggval; v; v = v->next)
5317 {
5318 gcc_assert (v->index >= 0);
5319 if (c < v->index)
5320 c = v->index;
5321 }
5322 c++;
5323
5324 adj = XALLOCAVEC (int, c);
5325 for (i = 0; i < c; i++)
5326 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5327 {
5328 adj[i] = -1;
5329 d++;
5330 }
5331 else
5332 adj[i] = i - d;
5333
5334 for (v = aggval; v; v = v->next)
5335 v->index = adj[v->index];
5336 }
5337
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI describes the function body being transformed, DESCS are its
     parameter descriptors, AV is the chain of known aggregate replacement
     values, SC and CC are out-flags set when a statement or the CFG is
     changed, respectively.  */
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  /* Context of the function body being analyzed.  */
  struct func_body_info *m_fbi;
  /* Descriptors of the function's formal parameters.  */
  vec<ipa_param_descriptor> m_descriptors;
  /* Aggregate values known to be stored in parameter-pointed memory.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Set when any statement, respectively the CFG, has been modified.  */
  bool *m_something_changed, *m_cfg_changed;
};
5358
5359 void
5360 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5361 {
5362 gimple_stmt_iterator gsi;
5363 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5364 {
5365 struct ipa_agg_replacement_value *v;
5366 gimple stmt = gsi_stmt (gsi);
5367 tree rhs, val, t;
5368 HOST_WIDE_INT offset, size;
5369 int index;
5370 bool by_ref, vce;
5371
5372 if (!gimple_assign_load_p (stmt))
5373 continue;
5374 rhs = gimple_assign_rhs1 (stmt);
5375 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5376 continue;
5377
5378 vce = false;
5379 t = rhs;
5380 while (handled_component_p (t))
5381 {
5382 /* V_C_E can do things like convert an array of integers to one
5383 bigger integer and similar things we do not handle below. */
5384 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
5385 {
5386 vce = true;
5387 break;
5388 }
5389 t = TREE_OPERAND (t, 0);
5390 }
5391 if (vce)
5392 continue;
5393
5394 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5395 &offset, &size, &by_ref))
5396 continue;
5397 for (v = m_aggval; v; v = v->next)
5398 if (v->index == index
5399 && v->offset == offset)
5400 break;
5401 if (!v
5402 || v->by_ref != by_ref
5403 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5404 continue;
5405
5406 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5407 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5408 {
5409 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5410 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5411 else if (TYPE_SIZE (TREE_TYPE (rhs))
5412 == TYPE_SIZE (TREE_TYPE (v->value)))
5413 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5414 else
5415 {
5416 if (dump_file)
5417 {
5418 fprintf (dump_file, " const ");
5419 print_generic_expr (dump_file, v->value, 0);
5420 fprintf (dump_file, " can't be converted to type of ");
5421 print_generic_expr (dump_file, rhs, 0);
5422 fprintf (dump_file, "\n");
5423 }
5424 continue;
5425 }
5426 }
5427 else
5428 val = v->value;
5429
5430 if (dump_file && (dump_flags & TDF_DETAILS))
5431 {
5432 fprintf (dump_file, "Modifying stmt:\n ");
5433 print_gimple_stmt (dump_file, stmt, 0, 0);
5434 }
5435 gimple_assign_set_rhs_from_tree (&gsi, val);
5436 update_stmt (stmt);
5437
5438 if (dump_file && (dump_flags & TDF_DETAILS))
5439 {
5440 fprintf (dump_file, "into:\n ");
5441 print_gimple_stmt (dump_file, stmt, 0, 0);
5442 fprintf (dump_file, "\n");
5443 }
5444
5445 *m_something_changed = true;
5446 if (maybe_clean_eh_stmt (stmt)
5447 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5448 *m_cfg_changed = true;
5449 }
5450
5451 }
5452
5453 /* IPCP transformation phase doing propagation of aggregate values. */
5454
5455 unsigned int
5456 ipcp_transform_function (struct cgraph_node *node)
5457 {
5458 vec<ipa_param_descriptor> descriptors = vNULL;
5459 struct func_body_info fbi;
5460 struct ipa_agg_replacement_value *aggval;
5461 int param_count;
5462 bool cfg_changed = false, something_changed = false;
5463
5464 gcc_checking_assert (cfun);
5465 gcc_checking_assert (current_function_decl);
5466
5467 if (dump_file)
5468 fprintf (dump_file, "Modification phase of node %s/%i\n",
5469 node->name (), node->order);
5470
5471 aggval = ipa_get_agg_replacements_for_node (node);
5472 if (!aggval)
5473 return 0;
5474 param_count = count_formal_params (node->decl);
5475 if (param_count == 0)
5476 return 0;
5477 adjust_agg_replacement_values (node, aggval);
5478 if (dump_file)
5479 ipa_dump_agg_replacement_values (dump_file, aggval);
5480
5481 fbi.node = node;
5482 fbi.info = NULL;
5483 fbi.bb_infos = vNULL;
5484 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5485 fbi.param_count = param_count;
5486 fbi.aa_walked = 0;
5487
5488 descriptors.safe_grow_cleared (param_count);
5489 ipa_populate_param_decls (node, descriptors);
5490 calculate_dominance_info (CDI_DOMINATORS);
5491 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5492 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5493
5494 int i;
5495 struct ipa_bb_info *bi;
5496 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5497 free_ipa_bb_info (bi);
5498 fbi.bb_infos.release ();
5499 free_dominance_info (CDI_DOMINATORS);
5500 (*ipa_node_agg_replacements)[node->uid] = NULL;
5501 descriptors.release ();
5502
5503 if (!something_changed)
5504 return 0;
5505 else if (cfg_changed)
5506 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5507 else
5508 return TODO_update_ssa_only_virtuals;
5509 }