1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "predict.h"
25 #include "vec.h"
26 #include "hashtab.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "tm.h"
30 #include "hard-reg-set.h"
31 #include "input.h"
32 #include "function.h"
33 #include "dominance.h"
34 #include "cfg.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "tree-eh.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "expr.h"
44 #include "stor-layout.h"
45 #include "print-tree.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "langhooks.h"
51 #include "target.h"
52 #include "hash-map.h"
53 #include "plugin-api.h"
54 #include "ipa-ref.h"
55 #include "cgraph.h"
56 #include "alloc-pool.h"
57 #include "ipa-prop.h"
58 #include "bitmap.h"
59 #include "gimple-ssa.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "tree-into-ssa.h"
64 #include "tree-dfa.h"
65 #include "tree-pass.h"
66 #include "tree-inline.h"
67 #include "ipa-inline.h"
68 #include "flags.h"
69 #include "diagnostic.h"
70 #include "gimple-pretty-print.h"
71 #include "lto-streamer.h"
72 #include "data-streamer.h"
73 #include "tree-streamer.h"
74 #include "params.h"
75 #include "ipa-utils.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "dbgcnt.h"
79 #include "domwalk.h"
80 #include "builtins.h"
81 #include "calls.h"
82
83 /* Intermediate information that we get from alias analysis about a particular
84 parameter in a particular basic_block. When a parameter or the memory it
85 references is marked modified, we use that information in all dominated
86 blocks without consulting the alias analysis oracle. */
87
88 struct param_aa_status
89 {
90 /* Set when this structure contains meaningful information. If not, the
91 structure describing a dominating BB should be used instead. */
92 bool valid;
93
94 /* Whether we have seen something which might have modified the data in
95 question. PARM is for the parameter itself, REF is for data it points to
96 but using the alias type of individual accesses and PT is the same thing
97 but for computing aggregate pass-through functions using a very inclusive
98 ao_ref. */
99 bool parm_modified, ref_modified, pt_modified;
100 };
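
/* For illustration (a hypothetical CFG sketch): if BB2 dominates BB3 and the
   walk performed on behalf of a statement in BB2 has already proven that a
   parameter was modified, BB3 starts out with a copy of BB2's verdict and the
   alias analysis oracle need not be consulted there again:

     BB2:  a = 7;         <- sets parm_modified for parameter A
      |
     BB3:  bar (a.0_2);   <- reuses the status recorded for BB2  */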
101
102 /* Information related to a given BB that is used only when looking at function
103 body. */
104
105 struct ipa_bb_info
106 {
107 /* Call graph edges going out of this BB. */
108 vec<cgraph_edge *> cg_edges;
109 /* Alias analysis statuses of each formal parameter at this bb. */
110 vec<param_aa_status> param_aa_statuses;
111 };
112
113 /* Structure with global information that is only used when looking at function
114 body. */
115
116 struct func_body_info
117 {
118 /* The node that is being analyzed. */
119 cgraph_node *node;
120
121 /* Its info. */
122 struct ipa_node_params *info;
123
124 /* Information about individual BBs. */
125 vec<ipa_bb_info> bb_infos;
126
127 /* Number of parameters. */
128 int param_count;
129
130 /* Number of statements walked so far when analyzing this function. */
131 unsigned int aa_walked;
132 };
133
134 /* Vector where the parameter infos are actually stored. */
135 vec<ipa_node_params> ipa_node_params_vector;
136 /* Vector of known aggregate values in cloned nodes. */
137 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
138 /* Vector where the argument infos are actually stored. */
139 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
140
141 /* Holders of ipa cgraph hooks: */
142 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
143 static struct cgraph_node_hook_list *node_removal_hook_holder;
144 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
145 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
146 static struct cgraph_node_hook_list *function_insertion_hook_holder;
147
148 /* Description of a reference to an IPA constant. */
149 struct ipa_cst_ref_desc
150 {
151 /* Edge that corresponds to the statement which took the reference. */
152 struct cgraph_edge *cs;
153 /* Linked list of duplicates created when call graph edges are cloned. */
154 struct ipa_cst_ref_desc *next_duplicate;
155 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
156 is out of control. */
157 int refcount;
158 };
159
160 /* Allocation pool for reference descriptions. */
161
162 static alloc_pool ipa_refdesc_pool;
163
164 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
165 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
166
167 static bool
168 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
169 {
170 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
171 struct cl_optimization *os;
172
173 if (!fs_opts)
174 return false;
175 os = TREE_OPTIMIZATION (fs_opts);
176 return !os->x_optimize || !os->x_flag_ipa_cp;
177 }
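
/* For instance (an illustrative declaration), a function declared as

     void f (void) __attribute__ ((optimize ("O0")));

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with x_optimize equal to zero,
   so the predicate above returns true and IPA-CP skips the function.  */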
178
179 /* Return index of the formal whose tree is PTREE in the parameter
180 DESCRIPTORS. */
181
182 static int
183 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
184 {
185 int i, count;
186
187 count = descriptors.length ();
188 for (i = 0; i < count; i++)
189 if (descriptors[i].decl == ptree)
190 return i;
191
192 return -1;
193 }
194
195 /* Return index of the formal whose tree is PTREE in function which corresponds
196 to INFO. */
197
198 int
199 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
200 {
201 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
202 }
203
204 /* Populate the decl fields in parameter DESCRIPTORS that correspond to
205 NODE. */
206
207 static void
208 ipa_populate_param_decls (struct cgraph_node *node,
209 vec<ipa_param_descriptor> &descriptors)
210 {
211 tree fndecl;
212 tree fnargs;
213 tree parm;
214 int param_num;
215
216 fndecl = node->decl;
217 gcc_assert (gimple_has_body_p (fndecl));
218 fnargs = DECL_ARGUMENTS (fndecl);
219 param_num = 0;
220 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
221 {
222 descriptors[param_num].decl = parm;
223 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
224 true);
225 param_num++;
226 }
227 }
228
229 /* Return how many formal parameters FNDECL has. */
230
231 int
232 count_formal_params (tree fndecl)
233 {
234 tree parm;
235 int count = 0;
236 gcc_assert (gimple_has_body_p (fndecl));
237
238 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
239 count++;
240
241 return count;
242 }
243
244 /* Dump a printable representation of the Ith formal parameter of the
245 function corresponding to INFO into FILE: the parameter index and, when
246 known, its declaration. */
247
248 void
249 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
250 {
251 fprintf (file, "param #%i", i);
252 if (info->descriptors[i].decl)
253 {
254 fprintf (file, " ");
255 print_generic_expr (file, info->descriptors[i].decl, 0);
256 }
257 }
258
259 /* Initialize the ipa_node_params structure associated with NODE
260 to hold PARAM_COUNT parameters. */
261
262 void
263 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
264 {
265 struct ipa_node_params *info = IPA_NODE_REF (node);
266
267 if (!info->descriptors.exists () && param_count)
268 info->descriptors.safe_grow_cleared (param_count);
269 }
270
271 /* Initialize the ipa_node_params structure associated with NODE by counting
272 the function parameters, creating the descriptors and populating their
273 param_decls. */
274
275 void
276 ipa_initialize_node_params (struct cgraph_node *node)
277 {
278 struct ipa_node_params *info = IPA_NODE_REF (node);
279
280 if (!info->descriptors.exists ())
281 {
282 ipa_alloc_node_params (node, count_formal_params (node->decl));
283 ipa_populate_param_decls (node, info->descriptors);
284 }
285 }
286
287 /* Print the jump functions associated with call graph edge CS to file F. */
288
289 static void
290 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
291 {
292 int i, count;
293
294 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
295 for (i = 0; i < count; i++)
296 {
297 struct ipa_jump_func *jump_func;
298 enum jump_func_type type;
299
300 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
301 type = jump_func->type;
302
303 fprintf (f, " param %d: ", i);
304 if (type == IPA_JF_UNKNOWN)
305 fprintf (f, "UNKNOWN\n");
306 else if (type == IPA_JF_KNOWN_TYPE)
307 {
308 fprintf (f, "KNOWN TYPE: base ");
309 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
310 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
311 jump_func->value.known_type.offset);
312 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
313 fprintf (f, "\n");
314 }
315 else if (type == IPA_JF_CONST)
316 {
317 tree val = jump_func->value.constant.value;
318 fprintf (f, "CONST: ");
319 print_generic_expr (f, val, 0);
320 if (TREE_CODE (val) == ADDR_EXPR
321 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
322 {
323 fprintf (f, " -> ");
324 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
325 0);
326 }
327 fprintf (f, "\n");
328 }
329 else if (type == IPA_JF_PASS_THROUGH)
330 {
331 fprintf (f, "PASS THROUGH: ");
332 fprintf (f, "%d, op %s",
333 jump_func->value.pass_through.formal_id,
334 get_tree_code_name(jump_func->value.pass_through.operation));
335 if (jump_func->value.pass_through.operation != NOP_EXPR)
336 {
337 fprintf (f, " ");
338 print_generic_expr (f,
339 jump_func->value.pass_through.operand, 0);
340 }
341 if (jump_func->value.pass_through.agg_preserved)
342 fprintf (f, ", agg_preserved");
343 if (jump_func->value.pass_through.type_preserved)
344 fprintf (f, ", type_preserved");
345 fprintf (f, "\n");
346 }
347 else if (type == IPA_JF_ANCESTOR)
348 {
349 fprintf (f, "ANCESTOR: ");
350 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
351 jump_func->value.ancestor.formal_id,
352 jump_func->value.ancestor.offset);
353 print_generic_expr (f, jump_func->value.ancestor.type, 0);
354 if (jump_func->value.ancestor.agg_preserved)
355 fprintf (f, ", agg_preserved");
356 if (jump_func->value.ancestor.type_preserved)
357 fprintf (f, ", type_preserved");
358 fprintf (f, "\n");
359 }
360
361 if (jump_func->agg.items)
362 {
363 struct ipa_agg_jf_item *item;
364 int j;
365
366 fprintf (f, " Aggregate passed by %s:\n",
367 jump_func->agg.by_ref ? "reference" : "value");
368 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
369 {
370 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
371 item->offset);
372 if (TYPE_P (item->value))
373 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
374 tree_to_uhwi (TYPE_SIZE (item->value)));
375 else
376 {
377 fprintf (f, "cst: ");
378 print_generic_expr (f, item->value, 0);
379 }
380 fprintf (f, "\n");
381 }
382 }
383
384 struct ipa_polymorphic_call_context *ctx
385 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
386 if (ctx && !ctx->useless_p ())
387 {
388 fprintf (f, " Context: ");
389 ctx->dump (dump_file);
390 }
391 }
392 }
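
/* As an illustrative sketch, for a call bar (a_1(D), 4) where A is the first
   formal parameter of the caller, the code above would print something like:

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 1: CONST: 4  */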
393
394
395 /* Print the jump functions of all arguments on all call graph edges going from
396 NODE to file F. */
397
398 void
399 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
400 {
401 struct cgraph_edge *cs;
402
403 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
404 node->order);
405 for (cs = node->callees; cs; cs = cs->next_callee)
406 {
407 if (!ipa_edge_args_info_available_for_edge_p (cs))
408 continue;
409
410 fprintf (f, " callsite %s/%i -> %s/%i : \n",
411 xstrdup (node->name ()), node->order,
412 xstrdup (cs->callee->name ()),
413 cs->callee->order);
414 ipa_print_node_jump_functions_for_edge (f, cs);
415 }
416
417 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
418 {
419 struct cgraph_indirect_call_info *ii;
420 if (!ipa_edge_args_info_available_for_edge_p (cs))
421 continue;
422
423 ii = cs->indirect_info;
424 if (ii->agg_contents)
425 fprintf (f, " indirect %s callsite, calling param %i, "
426 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
427 ii->member_ptr ? "member ptr" : "aggregate",
428 ii->param_index, ii->offset,
429 ii->by_ref ? "by reference" : "by value");
430 else
431 fprintf (f, " indirect %s callsite, calling param %i, "
432 "offset " HOST_WIDE_INT_PRINT_DEC,
433 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
434 ii->offset);
435
436 if (cs->call_stmt)
437 {
438 fprintf (f, ", for stmt ");
439 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
440 }
441 else
442 fprintf (f, "\n");
443 if (ii->polymorphic)
444 ii->context.dump (f);
445 ipa_print_node_jump_functions_for_edge (f, cs);
446 }
447 }
448
449 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
450
451 void
452 ipa_print_all_jump_functions (FILE *f)
453 {
454 struct cgraph_node *node;
455
456 fprintf (f, "\nJump functions:\n");
457 FOR_EACH_FUNCTION (node)
458 {
459 ipa_print_node_jump_functions (f, node);
460 }
461 }
462
463 /* Set JFUNC to be a known type jump function. */
464
465 static void
466 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
467 tree base_type, tree component_type)
468 {
469 /* Recording and propagating main variants increases the chance that types
470 will match. */
471 base_type = TYPE_MAIN_VARIANT (base_type);
472 component_type = TYPE_MAIN_VARIANT (component_type);
473
474 gcc_assert (contains_polymorphic_type_p (base_type)
475 && contains_polymorphic_type_p (component_type));
476 if (!flag_devirtualize)
477 return;
478 jfunc->type = IPA_JF_KNOWN_TYPE;
479 jfunc->value.known_type.offset = offset;
480 jfunc->value.known_type.base_type = base_type;
481 jfunc->value.known_type.component_type = component_type;
482 gcc_assert (component_type);
483 }
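
/* As an illustration (A, g and h are made-up names), in a C++ fragment like

     struct A { virtual void f (); };
     void g () { A a; h (&a); }

   the argument &a can be described by a KNOWN TYPE jump function with
   base_type A and offset 0 (component_type comes from the type the callee
   expects), provided the dynamic type of the object cannot change before
   the call (see detect_type_change below).  */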
484
485 /* Set JFUNC to be a copy of another jump function SRC (to be used by jump
486 function combination code). The two functions will share their rdesc. */
487
488 static void
489 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
490 struct ipa_jump_func *src)
491
492 {
493 gcc_checking_assert (src->type == IPA_JF_CONST);
494 dst->type = IPA_JF_CONST;
495 dst->value.constant = src->value.constant;
496 }
497
498 /* Set JFUNC to be a constant jump function. */
499
500 static void
501 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
502 struct cgraph_edge *cs)
503 {
504 constant = unshare_expr (constant);
505 if (constant && EXPR_P (constant))
506 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
507 jfunc->type = IPA_JF_CONST;
508 jfunc->value.constant.value = unshare_expr_without_location (constant);
509
510 if (TREE_CODE (constant) == ADDR_EXPR
511 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
512 {
513 struct ipa_cst_ref_desc *rdesc;
514 if (!ipa_refdesc_pool)
515 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
516 sizeof (struct ipa_cst_ref_desc), 32);
517
518 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
519 rdesc->cs = cs;
520 rdesc->next_duplicate = NULL;
521 rdesc->refcount = 1;
522 jfunc->value.constant.rdesc = rdesc;
523 }
524 else
525 jfunc->value.constant.rdesc = NULL;
526 }
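
/* For example (callee and bar are made-up names), a call such as

     bar (&callee);

   produces a constant jump function holding &callee together with a
   reference description whose refcount tracks, across later edge
   duplications and removals, how many uses of the reference to CALLEE
   remain recorded in the IPA structures.  */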
527
528 /* Set JFUNC to be a simple pass-through jump function. */
529 static void
530 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
531 bool agg_preserved, bool type_preserved)
532 {
533 jfunc->type = IPA_JF_PASS_THROUGH;
534 jfunc->value.pass_through.operand = NULL_TREE;
535 jfunc->value.pass_through.formal_id = formal_id;
536 jfunc->value.pass_through.operation = NOP_EXPR;
537 jfunc->value.pass_through.agg_preserved = agg_preserved;
538 jfunc->value.pass_through.type_preserved = type_preserved;
539 }
540
541 /* Set JFUNC to be an arithmetic pass through jump function. */
542
543 static void
544 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
545 tree operand, enum tree_code operation)
546 {
547 jfunc->type = IPA_JF_PASS_THROUGH;
548 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
549 jfunc->value.pass_through.formal_id = formal_id;
550 jfunc->value.pass_through.operation = operation;
551 jfunc->value.pass_through.agg_preserved = false;
552 jfunc->value.pass_through.type_preserved = false;
553 }
554
555 /* Set JFUNC to be an ancestor jump function. */
556
557 static void
558 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
559 tree type, int formal_id, bool agg_preserved,
560 bool type_preserved)
561 {
562 if (!flag_devirtualize)
563 type_preserved = false;
564 if (!type_preserved)
565 type = NULL_TREE;
566 if (type)
567 type = TYPE_MAIN_VARIANT (type);
568 if (!type || !contains_polymorphic_type_p (type))
569 type_preserved = false;
570 jfunc->type = IPA_JF_ANCESTOR;
571 jfunc->value.ancestor.formal_id = formal_id;
572 jfunc->value.ancestor.offset = offset;
573 jfunc->value.ancestor.type = type_preserved ? type : NULL;
574 jfunc->value.ancestor.agg_preserved = agg_preserved;
575 jfunc->value.ancestor.type_preserved = type_preserved;
576 }
577
578 /* Extract the actual BINFO being described by JFUNC which must be a known type
579 jump function. */
580
581 tree
582 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
583 {
584 if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
585 return NULL_TREE;
586
587 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
588
589 if (!base_binfo)
590 return NULL_TREE;
591 /* FIXME: At LTO we can't propagate to non-polymorphic type, because
592 we have no ODR equivalency on those. This should be fixed by
593 propagating on types rather than binfos that would make type
594 matching here unnecessary. */
595 if (in_lto_p
596 && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
597 || !TYPE_BINFO (jfunc->value.known_type.component_type)
598 || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
599 {
600 if (!jfunc->value.known_type.offset)
601 return base_binfo;
602 return NULL;
603 }
604 return get_binfo_at_offset (base_binfo,
605 jfunc->value.known_type.offset,
606 jfunc->value.known_type.component_type);
607 }
608
609 /* Get IPA BB information about the given BB. FBI is the context of analysis
610 of this function body. */
611
612 static struct ipa_bb_info *
613 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
614 {
615 gcc_checking_assert (fbi);
616 return &fbi->bb_infos[bb->index];
617 }
618
619 /* Structure to be passed in between detect_type_change and
620 check_stmt_for_type_change. */
621
622 struct prop_type_change_info
623 {
624 /* Offset into the object where there is the virtual method pointer we are
625 looking for. */
626 HOST_WIDE_INT offset;
627 /* The declaration or SSA_NAME pointer of the base that we are checking for
628 type change. */
629 tree object;
630 /* If we actually can tell the type that the object has changed to, it is
631 stored in this field. Otherwise it remains NULL_TREE. */
632 tree known_current_type;
633 /* Set to true if dynamic type change has been detected. */
634 bool type_maybe_changed;
635 /* Set to true if multiple types have been encountered. known_current_type
636 must be disregarded in that case. */
637 bool multiple_types_encountered;
638 };
639
640 /* Return true if STMT can modify a virtual method table pointer.
641
642 This function makes special assumptions about both constructors and
643 destructors which are all the functions that are allowed to alter the VMT
644 pointers. It assumes that destructors begin with assignment into all VMT
645 pointers and that constructors essentially look in the following way:
646
647 1) The very first thing they do is that they call constructors of ancestor
648 sub-objects that have them.
649
650 2) Then VMT pointers of this and all its ancestors are set to new values
651 corresponding to the type associated with the constructor.
652
653 3) Only afterwards, other stuff such as constructor of member sub-objects
654 and the code written by the user is run. Only this may include calling
655 virtual functions, directly or indirectly.
656
657 There is no way to call a constructor of an ancestor sub-object in any
658 other way.
659
660 This means that we do not have to care whether constructors get the correct
661 type information because they will always change it (in fact, if we define
662 the type to be given by the VMT pointer, it is undefined).
663
664 The most important fact to derive from the above is that if, for some
665 statement in the section 3, we try to detect whether the dynamic type has
666 changed, we can safely ignore all calls as we examine the function body
667 backwards until we reach statements in section 2 because these calls cannot
668 be ancestor constructors or destructors (if the input is not bogus) and so
669 do not change the dynamic type (this holds true only for automatically
670 allocated objects but at the moment we devirtualize only these). We then
671 must detect that statements in section 2 change the dynamic type and can try
672 to derive the new type. That is enough and we can stop, we will never see
673 the calls into constructors of sub-objects in this code. Therefore we can
674 safely ignore all call statements that we traverse.
675 */
676
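/* To illustrate the sections above on a made-up C++ constructor:

     B::B () : A ()            // 1) ancestor sub-object constructors run
     {
       // 2) here the compiler stores &_ZTV1B into this->_vptr (and sets
       //    the VMT pointers of all ancestors)
       this->x = init_x ();    // 3) only now user code runs; it may call
     }                         //    virtual functions  */
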
677 static bool
678 stmt_may_be_vtbl_ptr_store (gimple stmt)
679 {
680 if (is_gimple_call (stmt))
681 return false;
682 if (gimple_clobber_p (stmt))
683 return false;
684 else if (is_gimple_assign (stmt))
685 {
686 tree lhs = gimple_assign_lhs (stmt);
687
688 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
689 {
690 if (flag_strict_aliasing
691 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
692 return false;
693
694 if (TREE_CODE (lhs) == COMPONENT_REF
695 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
696 return false;
697 /* In the future we might want to use get_base_ref_and_offset to find
698 if there is a field corresponding to the offset and if so, proceed
699 almost like if it was a component ref. */
700 }
701 }
702 return true;
703 }
704
705 /* If STMT can be proved to be an assignment to the virtual method table
706 pointer of the object described by TCI and the type associated with the new
707 table identified, return the type. Otherwise return NULL_TREE. */
708
709 static tree
710 extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
711 {
712 HOST_WIDE_INT offset, size, max_size;
713 tree lhs, rhs, base, binfo;
714
715 if (!gimple_assign_single_p (stmt))
716 return NULL_TREE;
717
718 lhs = gimple_assign_lhs (stmt);
719 rhs = gimple_assign_rhs1 (stmt);
720 if (TREE_CODE (lhs) != COMPONENT_REF
721 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
722 return NULL_TREE;
723
724 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
725 if (offset != tci->offset
726 || size != POINTER_SIZE
727 || max_size != POINTER_SIZE)
728 return NULL_TREE;
729 if (TREE_CODE (base) == MEM_REF)
730 {
731 if (TREE_CODE (tci->object) != MEM_REF
732 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
733 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
734 TREE_OPERAND (base, 1)))
735 return NULL_TREE;
736 }
737 else if (tci->object != base)
738 return NULL_TREE;
739
740 binfo = vtable_pointer_value_to_binfo (rhs);
741
742 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
743 base of outer type. In this case we would need to either
744 work on binfos or translate it back to outer type and offset.
745 KNOWN_TYPE jump functions are not ready for that, yet. */
746 if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
747 return NULL;
748
749 return BINFO_TYPE (binfo);
750 }
751
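/* For instance (an illustrative GIMPLE statement), a store like

     this_1(D)->_vptr.A = &_ZTV1B + 16;

   matches the pattern above when it writes POINTER_SIZE bits at TCI->offset
   within TCI->object, and vtable_pointer_value_to_binfo then identifies the
   new dynamic type as B.  */
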
752 /* Callback of walk_aliased_vdefs and a helper function for
753 detect_type_change to check whether a particular statement may modify
754 the virtual table pointer, and if possible also determine the new type of
755 the (sub-)object. It stores its result into DATA, which points to a
756 prop_type_change_info structure. */
757
758 static bool
759 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
760 {
761 gimple stmt = SSA_NAME_DEF_STMT (vdef);
762 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
763
764 if (stmt_may_be_vtbl_ptr_store (stmt))
765 {
766 tree type;
767
768 type = extr_type_from_vtbl_ptr_store (stmt, tci);
769 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
770 if (tci->type_maybe_changed
771 && type != tci->known_current_type)
772 tci->multiple_types_encountered = true;
773 tci->known_current_type = type;
774 tci->type_maybe_changed = true;
775 return true;
776 }
777 else
778 return false;
779 }
780
781 /* See if ARG is a PARM_DECL describing an instance passed by pointer
782 or reference in FUNCTION. Return true if the dynamic type may change
783 between the beginning of the function and the invocation of CALL.
784
785 Generally functions are not allowed to change the type of such instances,
786 but they may call destructors. We assume that methods cannot destroy the
787 THIS pointer. Also, as a special case, constructors and destructors may
788 change the type of the THIS pointer. */
789
790 static bool
791 param_type_may_change_p (tree function, tree arg, gimple call)
792 {
793 /* Pure functions cannot make any changes to the dynamic type;
794 that requires writing to memory. */
795 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
796 return false;
797 /* We need to check if we are within an inlined constructor
798 or destructor (ideally we would have a way to check that the
799 inlined cdtor is actually working on ARG, but we have no easy
800 way to tie it to this, so punt on all non-pure cdtors).
801 We may also record the types of cdtors and, once we know the type
802 of the instance, match them.
803
804 Also code unification optimizations may merge calls from
805 different blocks making return values unreliable. So
806 do nothing during late optimization. */
807 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
808 return true;
809 if (TREE_CODE (arg) == SSA_NAME
810 && SSA_NAME_IS_DEFAULT_DEF (arg)
811 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
812 {
813 /* Normal (non-THIS) argument. */
814 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
815 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
816 /* THIS pointer of a method - here we want to watch constructors
817 and destructors as those definitely may change the dynamic
818 type. */
819 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
820 && !DECL_CXX_CONSTRUCTOR_P (function)
821 && !DECL_CXX_DESTRUCTOR_P (function)
822 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
823 {
824 /* Walk the inline stack and watch out for ctors/dtors. */
825 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
826 block = BLOCK_SUPERCONTEXT (block))
827 if (BLOCK_ABSTRACT_ORIGIN (block)
828 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
829 {
830 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
831
832 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
833 continue;
834 if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
835 && (DECL_CXX_CONSTRUCTOR_P (fn)
836 || DECL_CXX_DESTRUCTOR_P (fn)))
837 return true;
838 }
839 return false;
840 }
841 }
842 return true;
843 }
844
845 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
846 callsite CALL) by looking for assignments to its virtual table pointer. If
847 it is, return true and fill in the jump function JFUNC with relevant type
848 information or set it to unknown. ARG is the object itself (not a pointer
849 to it, unless dereferenced). BASE is the base of the memory access as
850 returned by get_ref_base_and_extent, as is the offset.
851
852 This is a helper function for detect_type_change and detect_type_change_ssa
853 that does the heavy work which is usually unnecessary. */
854
855 static bool
856 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
857 gimple call, struct ipa_jump_func *jfunc,
858 HOST_WIDE_INT offset)
859 {
860 struct prop_type_change_info tci;
861 ao_ref ao;
862 bool entry_reached = false;
863
864 gcc_checking_assert (DECL_P (arg)
865 || TREE_CODE (arg) == MEM_REF
866 || handled_component_p (arg));
867
868 comp_type = TYPE_MAIN_VARIANT (comp_type);
869
870 /* Const calls cannot call virtual methods through VMT and so type changes do
871 not matter. */
872 if (!flag_devirtualize || !gimple_vuse (call)
873 /* Be sure expected_type is polymorphic. */
874 || !comp_type
875 || TREE_CODE (comp_type) != RECORD_TYPE
876 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
877 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
878 return true;
879
880 ao_ref_init (&ao, arg);
881 ao.base = base;
882 ao.offset = offset;
883 ao.size = POINTER_SIZE;
884 ao.max_size = ao.size;
885
886 tci.offset = offset;
887 tci.object = get_base_address (arg);
888 tci.known_current_type = NULL_TREE;
889 tci.type_maybe_changed = false;
890 tci.multiple_types_encountered = false;
891
892 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
893 &tci, NULL, &entry_reached);
894 if (!tci.type_maybe_changed)
895 return false;
896
897 if (!tci.known_current_type
898 || tci.multiple_types_encountered
899 || offset != 0
900 /* When the walk reached function entry, it means that type
901 is set along some paths but not along others. */
902 || entry_reached)
903 jfunc->type = IPA_JF_UNKNOWN;
904 else
905 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
906
907 return true;
908 }
909
910 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
911 If it is, return true and fill in the jump function JFUNC with relevant type
912 information or set it to unknown. ARG is the object itself (not a pointer
913 to it, unless dereferenced). BASE is the base of the memory access as
914 returned by get_ref_base_and_extent, as is the offset. */
915
916 static bool
917 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
918 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
919 {
920 if (!flag_devirtualize)
921 return false;
922
923 if (TREE_CODE (base) == MEM_REF
924 && !param_type_may_change_p (current_function_decl,
925 TREE_OPERAND (base, 0),
926 call))
927 return false;
928 return detect_type_change_from_memory_writes (arg, base, comp_type,
929 call, jfunc, offset);
930 }
931
932 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
933 SSA name (its dereference will become the base and the offset is assumed to
934 be zero). */
935
936 static bool
937 detect_type_change_ssa (tree arg, tree comp_type,
938 gimple call, struct ipa_jump_func *jfunc)
939 {
940 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
941 if (!flag_devirtualize
942 || !POINTER_TYPE_P (TREE_TYPE (arg)))
943 return false;
944
945 if (!param_type_may_change_p (current_function_decl, arg, call))
946 return false;
947
948 arg = build2 (MEM_REF, ptr_type_node, arg,
949 build_int_cst (ptr_type_node, 0));
950
951 return detect_type_change_from_memory_writes (arg, arg, comp_type,
952 call, jfunc, 0);
953 }
954
955 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
956 boolean variable pointed to by DATA. */
957
958 static bool
959 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
960 void *data)
961 {
962 bool *b = (bool *) data;
963 *b = true;
964 return true;
965 }
966
967 /* Return true if we have already walked so many statements in AA that we
968 should really just start giving up. */
969
970 static bool
971 aa_overwalked (struct func_body_info *fbi)
972 {
973 gcc_checking_assert (fbi);
974 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
975 }
976
977 /* Find the nearest valid aa status for parameter specified by INDEX that
978 dominates BB. */
979
980 static struct param_aa_status *
981 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
982 int index)
983 {
984 while (true)
985 {
986 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
987 if (!bb)
988 return NULL;
989 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
990 if (!bi->param_aa_statuses.is_empty ()
991 && bi->param_aa_statuses[index].valid)
992 return &bi->param_aa_statuses[index];
993 }
994 }
995
996 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
997 structures and/or initialize the result with a dominating description as
998 necessary. */
999
1000 static struct param_aa_status *
1001 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
1002 int index)
1003 {
1004 gcc_checking_assert (fbi);
1005 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1006 if (bi->param_aa_statuses.is_empty ())
1007 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
1008 struct param_aa_status *paa = &bi->param_aa_statuses[index];
1009 if (!paa->valid)
1010 {
1011 gcc_checking_assert (!paa->parm_modified
1012 && !paa->ref_modified
1013 && !paa->pt_modified);
1014 struct param_aa_status *dom_paa;
1015 dom_paa = find_dominating_aa_status (fbi, bb, index);
1016 if (dom_paa)
1017 *paa = *dom_paa;
1018 else
1019 paa->valid = true;
1020 }
1021
1022 return paa;
1023 }
1024
1025 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
1026 a value known not to be modified in this function before reaching the
1027 statement STMT. FBI holds information about the function we have so far
1028 gathered but do not survive the summary building stage. */
1029
1030 static bool
1031 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
1032 gimple stmt, tree parm_load)
1033 {
1034 struct param_aa_status *paa;
1035 bool modified = false;
1036 ao_ref refd;
1037
1038 /* FIXME: FBI can be NULL if we are being called from outside
1039 ipa_node_analysis or ipcp_transform_function, which currently happens
1040 during inlining analysis. It would be great to extend fbi's lifetime and
1041 always have it. Currently, we are just not afraid of too much walking in
1042 that case. */
1043 if (fbi)
1044 {
1045 if (aa_overwalked (fbi))
1046 return false;
1047 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1048 if (paa->parm_modified)
1049 return false;
1050 }
1051 else
1052 paa = NULL;
1053
1054 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
1055 ao_ref_init (&refd, parm_load);
1056 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1057 &modified, NULL);
1058 if (fbi)
1059 fbi->aa_walked += walked;
1060 if (paa && modified)
1061 paa->parm_modified = true;
1062 return !modified;
1063 }
1064
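/* As an illustrative example, for an addressable parameter A:

     a.0_2 = a;       <- the load PARM_LOAD
     foo (&a);        <- may clobber A, the walk sets parm_modified
     bar (a.0_2);     <- A is not known to be preserved before this call

   whereas without the intervening call the walk above finds no aliasing
   vdefs and the load is known to see the unmodified value.  */
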
1065 /* If STMT is an assignment that loads a value from a parameter declaration,
1066 return the index of the parameter in ipa_node_params which has not been
1067 modified. Otherwise return -1. */
1068
1069 static int
1070 load_from_unmodified_param (struct func_body_info *fbi,
1071 vec<ipa_param_descriptor> descriptors,
1072 gimple stmt)
1073 {
1074 int index;
1075 tree op1;
1076
1077 if (!gimple_assign_single_p (stmt))
1078 return -1;
1079
1080 op1 = gimple_assign_rhs1 (stmt);
1081 if (TREE_CODE (op1) != PARM_DECL)
1082 return -1;
1083
1084 index = ipa_get_param_decl_index_1 (descriptors, op1);
1085 if (index < 0
1086 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1087 return -1;
1088
1089 return index;
1090 }
1091
1092 /* Return true if memory reference REF (which must be a load through parameter
1093 with INDEX) loads data that are known to be unmodified in this function
1094 before reaching statement STMT. */
1095
1096 static bool
1097 parm_ref_data_preserved_p (struct func_body_info *fbi,
1098 int index, gimple stmt, tree ref)
1099 {
1100 struct param_aa_status *paa;
1101 bool modified = false;
1102 ao_ref refd;
1103
1104 /* FIXME: FBI can be NULL if we are being called from outside
1105 ipa_node_analysis or ipcp_transform_function, which currently happens
1106 during inlining analysis. It would be great to extend fbi's lifetime and
1107 always have it. Currently, we are just not afraid of too much walking in
1108 that case. */
1109 if (fbi)
1110 {
1111 if (aa_overwalked (fbi))
1112 return false;
1113 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1114 if (paa->ref_modified)
1115 return false;
1116 }
1117 else
1118 paa = NULL;
1119
1120 gcc_checking_assert (gimple_vuse (stmt));
1121 ao_ref_init (&refd, ref);
1122 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1123 &modified, NULL);
1124 if (fbi)
1125 fbi->aa_walked += walked;
1126 if (paa && modified)
1127 paa->ref_modified = true;
1128 return !modified;
1129 }
1130
1131 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1132 is known to be unmodified in this function before reaching call statement
1133 CALL into which it is passed. FBI describes the function body. */
1134
1135 static bool
1136 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
1137 gimple call, tree parm)
1138 {
1139 bool modified = false;
1140 ao_ref refd;
1141
1142 /* It's unnecessary to calculate anything about memory contents for a const
1143 function because it is not going to use it. But do not cache the result
1144 either. Also, no such calculations for non-pointers. */
1145 if (!gimple_vuse (call)
1146 || !POINTER_TYPE_P (TREE_TYPE (parm))
1147 || aa_overwalked (fbi))
1148 return false;
1149
1150 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1151 index);
1152 if (paa->pt_modified)
1153 return false;
1154
1155 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1156 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1157 &modified, NULL);
1158 fbi->aa_walked += walked;
1159 if (modified)
1160 paa->pt_modified = true;
1161 return !modified;
1162 }
1163
1164 /* Return true if we can prove that OP is a memory reference loading unmodified
1165 data from an aggregate passed as a parameter and if the aggregate is passed
1166 by reference, that the alias type of the load corresponds to the type of the
1167 formal parameter (so that we can rely on this type for TBAA in callers).
1168 FBI and DESCRIPTORS describe parameters of the current function (but FBI
1169 can be NULL), STMT is the load statement. If the function returns true,
1170 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1171 within the aggregate and whether it is a load from a value passed by
1172 reference respectively. */
1173
1174 static bool
1175 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1176 vec<ipa_param_descriptor> descriptors,
1177 gimple stmt, tree op, int *index_p,
1178 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1179 bool *by_ref_p)
1180 {
1181 int index;
1182 HOST_WIDE_INT size, max_size;
1183 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1184
1185 if (max_size == -1 || max_size != size || *offset_p < 0)
1186 return false;
1187
1188 if (DECL_P (base))
1189 {
1190 int index = ipa_get_param_decl_index_1 (descriptors, base);
1191 if (index >= 0
1192 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1193 {
1194 *index_p = index;
1195 *by_ref_p = false;
1196 if (size_p)
1197 *size_p = size;
1198 return true;
1199 }
1200 return false;
1201 }
1202
1203 if (TREE_CODE (base) != MEM_REF
1204 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1205 || !integer_zerop (TREE_OPERAND (base, 1)))
1206 return false;
1207
1208 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1209 {
1210 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1211 index = ipa_get_param_decl_index_1 (descriptors, parm);
1212 }
1213 else
1214 {
1215 /* This branch catches situations where a pointer parameter is not a
1216 gimple register, for example:
1217
1218 void hip7(S*) (struct S * p)
1219 {
1220 void (*<T2e4>) (struct S *) D.1867;
1221 struct S * p.1;
1222
1223 <bb 2>:
1224 p.1_1 = p;
1225 D.1867_2 = p.1_1->f;
1226 D.1867_2 ();
1227 gdp = &p;
1228 */
1229
1230 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1231 index = load_from_unmodified_param (fbi, descriptors, def);
1232 }
1233
1234 if (index >= 0
1235 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1236 {
1237 *index_p = index;
1238 *by_ref_p = true;
1239 if (size_p)
1240 *size_p = size;
1241 return true;
1242 }
1243 return false;
1244 }
1245
1246 /* Just like the previous function, just without the func_body_info
1247 pointer, for users outside of this file. */
1248
1249 bool
1250 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1251 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1252 bool *by_ref_p)
1253 {
1254 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1255 offset_p, NULL, by_ref_p);
1256 }
1257
1258 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1259 of an assignment statement STMT, try to determine whether we are actually
1260 handling any of the following cases and construct an appropriate jump
1261 function into JFUNC if so:
1262
1263 1) The passed value is loaded from a formal parameter which is not a gimple
1264 register (most probably because it is addressable, the value has to be
1265 scalar) and we can guarantee the value has not changed. This case can
1266 therefore be described by a simple pass-through jump function. For example:
1267
1268 foo (int a)
1269 {
1270 int a.0;
1271
1272 a.0_2 = a;
1273 bar (a.0_2);
1274
1275 2) The passed value can be described by a simple arithmetic pass-through
1276 jump function. E.g.
1277
1278 foo (int a)
1279 {
1280 int D.2064;
1281
1282 D.2064_4 = a.1(D) + 4;
1283 bar (D.2064_4);
1284
1285 This case can also occur in combination with the previous one, e.g.:
1286
1287 foo (int a, int z)
1288 {
1289 int a.0;
1290 int D.2064;
1291
1292 a.0_3 = a;
1293 D.2064_4 = a.0_3 + 4;
1294 foo (D.2064_4);
1295
1296 3) The passed value is an address of an object within another one (which
1297 is also passed by reference). Such situations are described by an ancestor
1298 jump function and describe situations such as:
1299
1300 B::foo() (struct B * const this)
1301 {
1302 struct A * D.1845;
1303
1304 D.1845_2 = &this_1(D)->D.1748;
1305 A::bar (D.1845_2);
1306
1307 INFO is the structure describing individual parameters, accessed at
1308 different stages of IPA optimizations. FBI contains the information that
1309 is only needed for intraprocedural analysis. */
1310
1311 static void
1312 compute_complex_assign_jump_func (struct func_body_info *fbi,
1313 struct ipa_node_params *info,
1314 struct ipa_jump_func *jfunc,
1315 gimple call, gimple stmt, tree name,
1316 tree param_type)
1317 {
1318 HOST_WIDE_INT offset, size, max_size;
1319 tree op1, tc_ssa, base, ssa;
1320 int index;
1321
1322 op1 = gimple_assign_rhs1 (stmt);
1323
1324 if (TREE_CODE (op1) == SSA_NAME)
1325 {
1326 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1327 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1328 else
1329 index = load_from_unmodified_param (fbi, info->descriptors,
1330 SSA_NAME_DEF_STMT (op1));
1331 tc_ssa = op1;
1332 }
1333 else
1334 {
1335 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1336 tc_ssa = gimple_assign_lhs (stmt);
1337 }
1338
1339 if (index >= 0)
1340 {
1341 tree op2 = gimple_assign_rhs2 (stmt);
1342
1343 if (op2)
1344 {
1345 if (!is_gimple_ip_invariant (op2)
1346 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1347 && !useless_type_conversion_p (TREE_TYPE (name),
1348 TREE_TYPE (op1))))
1349 return;
1350
1351 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1352 gimple_assign_rhs_code (stmt));
1353 }
1354 else if (gimple_assign_single_p (stmt))
1355 {
1356 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1357 bool type_p = false;
1358
1359 if (param_type && POINTER_TYPE_P (param_type))
1360 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1361 call, jfunc);
1362 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1363 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1364 }
1365 return;
1366 }
1367
1368 if (TREE_CODE (op1) != ADDR_EXPR)
1369 return;
1370 op1 = TREE_OPERAND (op1, 0);
1371 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1372 return;
1373 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1374 if (TREE_CODE (base) != MEM_REF
1375 /* If this is a varying address, punt. */
1376 || max_size == -1
1377 || max_size != size)
1378 return;
1379 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1380 ssa = TREE_OPERAND (base, 0);
1381 if (TREE_CODE (ssa) != SSA_NAME
1382 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1383 || offset < 0)
1384 return;
1385
1386 /* Dynamic types are changed in constructors and destructors. */
1387 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1388 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1389 {
1390 bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
1391 && !detect_type_change (op1, base, TREE_TYPE (param_type),
1392 call, jfunc, offset));
1393 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1394 ipa_set_ancestor_jf (jfunc, offset,
1395 type_p ? TREE_TYPE (param_type) : NULL, index,
1396 parm_ref_data_pass_through_p (fbi, index,
1397 call, ssa), type_p);
1398 }
1399 }
1400
1401 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1402 it looks like:
1403
1404 iftmp.1_3 = &obj_2(D)->D.1762;
1405
1406 The base of the MEM_REF must be a default definition SSA NAME of a
1407 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1408 whole MEM_REF expression is returned and the offset calculated from any
1409 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1410 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1411
1412 static tree
1413 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1414 {
1415 HOST_WIDE_INT size, max_size;
1416 tree expr, parm, obj;
1417
1418 if (!gimple_assign_single_p (assign))
1419 return NULL_TREE;
1420 expr = gimple_assign_rhs1 (assign);
1421
1422 if (TREE_CODE (expr) != ADDR_EXPR)
1423 return NULL_TREE;
1424 expr = TREE_OPERAND (expr, 0);
1425 obj = expr;
1426 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1427
1428 if (TREE_CODE (expr) != MEM_REF
1429 /* If this is a varying address, punt. */
1430 || max_size == -1
1431 || max_size != size
1432 || *offset < 0)
1433 return NULL_TREE;
1434 parm = TREE_OPERAND (expr, 0);
1435 if (TREE_CODE (parm) != SSA_NAME
1436 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1437 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1438 return NULL_TREE;
1439
1440 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1441 *obj_p = obj;
1442 return expr;
1443 }
1444
1445
1446 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1447 statement PHI, try to find out whether NAME is in fact a
1448 multiple-inheritance typecast from a descendant into an ancestor of a formal
1449 parameter and thus can be described by an ancestor jump function and if so,
1450 write the appropriate function into JFUNC.
1451
1452 Essentially we want to match the following pattern:
1453
1454 if (obj_2(D) != 0B)
1455 goto <bb 3>;
1456 else
1457 goto <bb 4>;
1458
1459 <bb 3>:
1460 iftmp.1_3 = &obj_2(D)->D.1762;
1461
1462 <bb 4>:
1463 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1464 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1465 return D.1879_6; */
1466
1467 static void
1468 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1469 struct ipa_node_params *info,
1470 struct ipa_jump_func *jfunc,
1471 gimple call, gimple phi, tree param_type)
1472 {
1473 HOST_WIDE_INT offset;
1474 gimple assign, cond;
1475 basic_block phi_bb, assign_bb, cond_bb;
1476 tree tmp, parm, expr, obj;
1477 int index, i;
1478
1479 if (gimple_phi_num_args (phi) != 2)
1480 return;
1481
1482 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1483 tmp = PHI_ARG_DEF (phi, 0);
1484 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1485 tmp = PHI_ARG_DEF (phi, 1);
1486 else
1487 return;
1488 if (TREE_CODE (tmp) != SSA_NAME
1489 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1490 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1491 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1492 return;
1493
1494 assign = SSA_NAME_DEF_STMT (tmp);
1495 assign_bb = gimple_bb (assign);
1496 if (!single_pred_p (assign_bb))
1497 return;
1498 expr = get_ancestor_addr_info (assign, &obj, &offset);
1499 if (!expr)
1500 return;
1501 parm = TREE_OPERAND (expr, 0);
1502 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1503 if (index < 0)
1504 return;
1505
1506 cond_bb = single_pred (assign_bb);
1507 cond = last_stmt (cond_bb);
1508 if (!cond
1509 || gimple_code (cond) != GIMPLE_COND
1510 || gimple_cond_code (cond) != NE_EXPR
1511 || gimple_cond_lhs (cond) != parm
1512 || !integer_zerop (gimple_cond_rhs (cond)))
1513 return;
1514
1515 phi_bb = gimple_bb (phi);
1516 for (i = 0; i < 2; i++)
1517 {
1518 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1519 if (pred != assign_bb && pred != cond_bb)
1520 return;
1521 }
1522
1523 bool type_p = false;
1524 if (param_type && POINTER_TYPE_P (param_type)
1525 && contains_polymorphic_type_p (TREE_TYPE (param_type)))
1526 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1527 call, jfunc, offset);
1528 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1529 ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
1530 index,
1531 parm_ref_data_pass_through_p (fbi, index, call, parm),
1532 type_p);
1533 }
1534
1535 /* Given OP which is passed as an actual argument to a called function,
1536 determine if it is possible to construct a KNOWN_TYPE jump function for it
1537 and if so, create one and store it to JFUNC.
1538 EXPECTED_TYPE represents a type the argument should be in. */
1539
1540 static void
1541 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1542 gimple call, tree expected_type)
1543 {
1544 HOST_WIDE_INT offset, size, max_size;
1545 tree base;
1546
1547 if (!flag_devirtualize
1548 || TREE_CODE (op) != ADDR_EXPR
1549 || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
1550 /* Be sure expected_type is polymorphic. */
1551 || !expected_type
1552 || !contains_polymorphic_type_p (expected_type))
1553 return;
1554
1555 op = TREE_OPERAND (op, 0);
1556 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1557 if (!DECL_P (base)
1558 || max_size == -1
1559 || max_size != size
1560 || !contains_polymorphic_type_p (TREE_TYPE (base)))
1561 return;
1562
1563 if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
1564 call, current_function_decl)
1565 /* Even if the var seems to be in construction by inline call stack,
1566 we may work out the actual type by walking memory writes. */
1567 && (is_global_var (base)
1568 || detect_type_change (op, base, expected_type, call, jfunc, offset)))
1569 return;
1570
1571 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1572 expected_type);
1573 }
1574
1575 /* Inspect the given TYPE and return true iff it has the same structure (the
1576 same number of fields of the same types) as a C++ member pointer. If
1577 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1578 corresponding fields there. */
1579
1580 static bool
1581 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1582 {
1583 tree fld;
1584
1585 if (TREE_CODE (type) != RECORD_TYPE)
1586 return false;
1587
1588 fld = TYPE_FIELDS (type);
1589 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1590 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1591 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1592 return false;
1593
1594 if (method_ptr)
1595 *method_ptr = fld;
1596
1597 fld = DECL_CHAIN (fld);
1598 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1599 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1600 return false;
1601 if (delta)
1602 *delta = fld;
1603
1604 if (DECL_CHAIN (fld))
1605 return false;
1606
1607 return true;
1608 }
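
/* The layout being matched corresponds roughly to the following C sketch of
   the Itanium C++ ABI pointer-to-member-function representation (the names
   are illustrative only):

     struct member_ptr
     {
       void (*method_ptr) ();  // method pointer, or vtable offset plus one
       long delta;             // adjustment to add to THIS
     };  */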
1609
1610 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1611 return the rhs of its defining statement. Otherwise return RHS as it
1612 is. */
1613
1614 static inline tree
1615 get_ssa_def_if_simple_copy (tree rhs)
1616 {
1617 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1618 {
1619 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1620
1621 if (gimple_assign_single_p (def_stmt))
1622 rhs = gimple_assign_rhs1 (def_stmt);
1623 else
1624 break;
1625 }
1626 return rhs;
1627 }
1628
1629 /* Simple linked list, describing known contents of an aggregate before a
1630 call. */
1631
1632 struct ipa_known_agg_contents_list
1633 {
1634 /* Offset and size of the described part of the aggregate. */
1635 HOST_WIDE_INT offset, size;
1636 /* Known constant value or NULL if the contents are known to be unknown. */
1637 tree constant;
1638 /* Pointer to the next structure in the list. */
1639 struct ipa_known_agg_contents_list *next;
1640 };
1641
1642 /* Find the proper place in linked list of ipa_known_agg_contents_list
1643 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1644 unless there is a partial overlap, in which case return NULL, or such
1645 element is already there, in which case set *ALREADY_THERE to true. */
1646
1647 static struct ipa_known_agg_contents_list **
1648 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1649 HOST_WIDE_INT lhs_offset,
1650 HOST_WIDE_INT lhs_size,
1651 bool *already_there)
1652 {
1653 struct ipa_known_agg_contents_list **p = list;
1654 while (*p && (*p)->offset < lhs_offset)
1655 {
1656 if ((*p)->offset + (*p)->size > lhs_offset)
1657 return NULL;
1658 p = &(*p)->next;
1659 }
1660
1661 if (*p && (*p)->offset < lhs_offset + lhs_size)
1662 {
1663 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1664 /* We already know this value is subsequently overwritten with
1665 something else. */
1666 *already_there = true;
1667 else
1668 /* Otherwise this is a partial overlap which we cannot
1669 represent. */
1670 return NULL;
1671 }
1672 return p;
1673 }
1674
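/* For example, if the list already describes bits 0..31 and 64..95, then a
   new entry for bits 64..95 sets *ALREADY_THERE, a new entry for bits 32..63
   is linked in between the two, and one for bits 48..79 makes the function
   return NULL because it partially overlaps an existing element.  */
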
1675 /* Build aggregate jump function from LIST, assuming there are exactly
1676 CONST_COUNT constant entries there and that the offset of the passed argument
1677 is ARG_OFFSET and store it into JFUNC. */
1678
1679 static void
1680 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1681 int const_count, HOST_WIDE_INT arg_offset,
1682 struct ipa_jump_func *jfunc)
1683 {
1684 vec_alloc (jfunc->agg.items, const_count);
1685 while (list)
1686 {
1687 if (list->constant)
1688 {
1689 struct ipa_agg_jf_item item;
1690 item.offset = list->offset - arg_offset;
1691 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1692 item.value = unshare_expr_without_location (list->constant);
1693 jfunc->agg.items->quick_push (item);
1694 }
1695 list = list->next;
1696 }
1697 }
1698
1699 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1700 in ARG is filled in with constant values. ARG can either be an aggregate
1701 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1702 aggregate. JFUNC is the jump function into which the constants are
1703 subsequently stored. */
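
/* For instance (a made-up caller; assuming a 32-bit int), the code

     struct S { int a, b; };
     void caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);
     }

   results in an aggregate jump function passed by reference with two items,
   {offset 0, value 1} and {offset 32, value 2}.  */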
1704
1705 static void
1706 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1707 struct ipa_jump_func *jfunc)
1708 {
1709 struct ipa_known_agg_contents_list *list = NULL;
1710 int item_count = 0, const_count = 0;
1711 HOST_WIDE_INT arg_offset, arg_size;
1712 gimple_stmt_iterator gsi;
1713 tree arg_base;
1714 bool check_ref, by_ref;
1715 ao_ref r;
1716
1717 /* The function operates in three stages. First, we prepare check_ref, r,
1718 arg_base and arg_offset based on what is passed as the actual
1719 argument. */
1720
1721 if (POINTER_TYPE_P (arg_type))
1722 {
1723 by_ref = true;
1724 if (TREE_CODE (arg) == SSA_NAME)
1725 {
1726 tree type_size;
1727 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1728 return;
1729 check_ref = true;
1730 arg_base = arg;
1731 arg_offset = 0;
1732 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1733 arg_size = tree_to_uhwi (type_size);
1734 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1735 }
1736 else if (TREE_CODE (arg) == ADDR_EXPR)
1737 {
1738 HOST_WIDE_INT arg_max_size;
1739
1740 arg = TREE_OPERAND (arg, 0);
1741 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1742 &arg_max_size);
1743 if (arg_max_size == -1
1744 || arg_max_size != arg_size
1745 || arg_offset < 0)
1746 return;
1747 if (DECL_P (arg_base))
1748 {
1749 check_ref = false;
1750 ao_ref_init (&r, arg_base);
1751 }
1752 else
1753 return;
1754 }
1755 else
1756 return;
1757 }
1758 else
1759 {
1760 HOST_WIDE_INT arg_max_size;
1761
1762 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1763
1764 by_ref = false;
1765 check_ref = false;
1766 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1767 &arg_max_size);
1768 if (arg_max_size == -1
1769 || arg_max_size != arg_size
1770 || arg_offset < 0)
1771 return;
1772
1773 ao_ref_init (&r, arg);
1774 }
1775
1776 /* The second stage walks the BB backwards, looks at individual statements
1777 and, as long as it is confident of how the statements affect contents of
1778 the aggregates, builds a sorted linked list of ipa_known_agg_contents_list
1779 structures describing them. */
1780 gsi = gsi_for_stmt (call);
1781 gsi_prev (&gsi);
1782 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1783 {
1784 struct ipa_known_agg_contents_list *n, **p;
1785 gimple stmt = gsi_stmt (gsi);
1786 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1787 tree lhs, rhs, lhs_base;
1788
1789 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1790 continue;
1791 if (!gimple_assign_single_p (stmt))
1792 break;
1793
1794 lhs = gimple_assign_lhs (stmt);
1795 rhs = gimple_assign_rhs1 (stmt);
1796 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1797 || TREE_CODE (lhs) == BIT_FIELD_REF
1798 || contains_bitfld_component_ref_p (lhs))
1799 break;
1800
1801 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1802 &lhs_max_size);
1803 if (lhs_max_size == -1
1804 || lhs_max_size != lhs_size)
1805 break;
1806
1807 if (check_ref)
1808 {
1809 if (TREE_CODE (lhs_base) != MEM_REF
1810 || TREE_OPERAND (lhs_base, 0) != arg_base
1811 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1812 break;
1813 }
1814 else if (lhs_base != arg_base)
1815 {
1816 if (DECL_P (lhs_base))
1817 continue;
1818 else
1819 break;
1820 }
1821
1822 bool already_there = false;
1823 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1824 &already_there);
1825 if (!p)
1826 break;
1827 if (already_there)
1828 continue;
1829
1830 rhs = get_ssa_def_if_simple_copy (rhs);
1831 n = XALLOCA (struct ipa_known_agg_contents_list);
1832 n->size = lhs_size;
1833 n->offset = lhs_offset;
1834 if (is_gimple_ip_invariant (rhs))
1835 {
1836 n->constant = rhs;
1837 const_count++;
1838 }
1839 else
1840 n->constant = NULL_TREE;
1841 n->next = *p;
1842 *p = n;
1843
1844 item_count++;
1845 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1846 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1847 break;
1848 }
1849
1850 /* The third stage just goes over the list and creates an appropriate vector
1851 of ipa_agg_jf_item structures out of it, of course only if there are
1852 any known constants to begin with. */
1853
1854 if (const_count)
1855 {
1856 jfunc->agg.by_ref = by_ref;
1857 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1858 }
1859 }
1860
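/* Return the type of the I-th formal parameter of the function corresponding
   to call graph edge E, or NULL if it cannot be determined.  */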
1861 static tree
1862 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1863 {
1864 int n;
1865 tree type = (e->callee
1866 ? TREE_TYPE (e->callee->decl)
1867 : gimple_call_fntype (e->call_stmt));
1868 tree t = TYPE_ARG_TYPES (type);
1869
1870 for (n = 0; n < i; n++)
1871 {
1872 if (!t)
1873 break;
1874 t = TREE_CHAIN (t);
1875 }
1876 if (t)
1877 return TREE_VALUE (t);
1878 if (!e->callee)
1879 return NULL;
1880 t = DECL_ARGUMENTS (e->callee->decl);
1881 for (n = 0; n < i; n++)
1882 {
1883 if (!t)
1884 return NULL;
1885 t = TREE_CHAIN (t);
1886 }
1887 if (t)
1888 return TREE_TYPE (t);
1889 return NULL;
1890 }
1891
1892 /* Compute jump function for all arguments of callsite CS and insert the
1893 information in the jump_functions array in the ipa_edge_args corresponding
1894 to this callsite. */
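/* For instance, in a call foo (i, 7) where i is an unmodified parameter of
   the caller, the first argument would typically get a simple pass-through
   jump function and the second one a constant jump function.  */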
1895
1896 static void
1897 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1898 struct cgraph_edge *cs)
1899 {
1900 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1901 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1902 gimple call = cs->call_stmt;
1903 int n, arg_num = gimple_call_num_args (call);
1904 bool useful_context = false;
1905
1906 if (arg_num == 0 || args->jump_functions)
1907 return;
1908 vec_safe_grow_cleared (args->jump_functions, arg_num);
1909 if (flag_devirtualize)
1910 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1911
1912 if (gimple_call_internal_p (call))
1913 return;
1914 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1915 return;
1916
1917 for (n = 0; n < arg_num; n++)
1918 {
1919 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1920 tree arg = gimple_call_arg (call, n);
1921 tree param_type = ipa_get_callee_param_type (cs, n);
1922 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1923 {
1924 tree instance;
1925 struct ipa_polymorphic_call_context context (cs->caller->decl,
1926 arg, cs->call_stmt,
1927 &instance);
1928 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1929 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1930 if (!context.useless_p ())
1931 useful_context = true;
1932 }
1933
1934 if (is_gimple_ip_invariant (arg))
1935 ipa_set_jf_constant (jfunc, arg, cs);
1936 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1937 && TREE_CODE (arg) == PARM_DECL)
1938 {
1939 int index = ipa_get_param_decl_index (info, arg);
1940
1941 gcc_assert (index >= 0);
1942 /* Aggregate passed by value, check for pass-through, otherwise we
1943 will attempt to fill in aggregate contents later in this
1944 iteration of the loop. */
1945 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1946 {
1947 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1948 continue;
1949 }
1950 }
1951 else if (TREE_CODE (arg) == SSA_NAME)
1952 {
1953 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1954 {
1955 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1956 if (index >= 0)
1957 {
1958 bool agg_p, type_p;
1959 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1960 if (param_type && POINTER_TYPE_P (param_type))
1961 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1962 call, jfunc);
1963 else
1964 type_p = false;
1965 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1966 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1967 type_p);
1968 }
1969 }
1970 else
1971 {
1972 gimple stmt = SSA_NAME_DEF_STMT (arg);
1973 if (is_gimple_assign (stmt))
1974 compute_complex_assign_jump_func (fbi, info, jfunc,
1975 call, stmt, arg, param_type);
1976 else if (gimple_code (stmt) == GIMPLE_PHI)
1977 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1978 call, stmt, param_type);
1979 }
1980 }
1981 else
1982 compute_known_type_jump_func (arg, jfunc, call,
1983 param_type
1984 && POINTER_TYPE_P (param_type)
1985 ? TREE_TYPE (param_type)
1986 : NULL);
1987
1988 /* If ARG is a pointer, we cannot use its type to determine the type of the
1989 aggregate passed (because type conversions are ignored in gimple). Usually
1990 we can safely get the type from the function declaration, but for K&R
1991 prototypes or variadic functions we can try our luck with the type of the
1992 pointer passed. TODO: Since we look for actual initialization of the memory
1993 object, we may do better to work out the type from the stores we find. */
1994 if (!param_type)
1995 param_type = TREE_TYPE (arg);
1996
1997 if ((jfunc->type != IPA_JF_PASS_THROUGH
1998 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1999 && (jfunc->type != IPA_JF_ANCESTOR
2000 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2001 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2002 || POINTER_TYPE_P (param_type)))
2003 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
2004 }
2005 if (!useful_context)
2006 vec_free (args->polymorphic_call_contexts);
2007 }
2008
2009 /* Compute jump functions for all edges - both direct and indirect - outgoing
2010 from BB. */
2011
2012 static void
2013 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
2014 {
2015 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2016 int i;
2017 struct cgraph_edge *cs;
2018
2019 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2020 {
2021 struct cgraph_node *callee = cs->callee;
2022
2023 if (callee)
2024 {
2025 callee->ultimate_alias_target ();
2026 /* We do not need to bother analyzing calls to unknown functions
2027 unless they may become known during LTO/WHOPR. */
2028 if (!callee->definition && !flag_lto)
2029 continue;
2030 }
2031 ipa_compute_jump_functions_for_edge (fbi, cs);
2032 }
2033 }
2034
2035 /* If STMT looks like a statement loading a value from a member pointer formal
2036 parameter, return that parameter and store the offset of the field to
2037 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2038 might be clobbered). If USE_DELTA, then we look for a use of the delta
2039 field rather than the pfn. */
2040
2041 static tree
2042 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2043 HOST_WIDE_INT *offset_p)
2044 {
2045 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2046
2047 if (!gimple_assign_single_p (stmt))
2048 return NULL_TREE;
2049
2050 rhs = gimple_assign_rhs1 (stmt);
2051 if (TREE_CODE (rhs) == COMPONENT_REF)
2052 {
2053 ref_field = TREE_OPERAND (rhs, 1);
2054 rhs = TREE_OPERAND (rhs, 0);
2055 }
2056 else
2057 ref_field = NULL_TREE;
2058 if (TREE_CODE (rhs) != MEM_REF)
2059 return NULL_TREE;
2060 rec = TREE_OPERAND (rhs, 0);
2061 if (TREE_CODE (rec) != ADDR_EXPR)
2062 return NULL_TREE;
2063 rec = TREE_OPERAND (rec, 0);
2064 if (TREE_CODE (rec) != PARM_DECL
2065 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2066 return NULL_TREE;
2067 ref_offset = TREE_OPERAND (rhs, 1);
2068
2069 if (use_delta)
2070 fld = delta_field;
2071 else
2072 fld = ptr_field;
2073 if (offset_p)
2074 *offset_p = int_bit_position (fld);
2075
2076 if (ref_field)
2077 {
2078 if (integer_nonzerop (ref_offset))
2079 return NULL_TREE;
2080 return ref_field == fld ? rec : NULL_TREE;
2081 }
2082 else
2083 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2084 : NULL_TREE;
2085 }
2086
2087 /* Returns true iff T is an SSA_NAME defined by a statement. */
2088
2089 static bool
2090 ipa_is_ssa_with_stmt_def (tree t)
2091 {
2092 if (TREE_CODE (t) == SSA_NAME
2093 && !SSA_NAME_IS_DEFAULT_DEF (t))
2094 return true;
2095 else
2096 return false;
2097 }
2098
2099 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2100 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2101 indirect call graph edge. */
2102
2103 static struct cgraph_edge *
2104 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2105 {
2106 struct cgraph_edge *cs;
2107
2108 cs = node->get_edge (stmt);
2109 cs->indirect_info->param_index = param_index;
2110 cs->indirect_info->agg_contents = 0;
2111 cs->indirect_info->member_ptr = 0;
2112 return cs;
2113 }
2114
2115 /* Analyze the CALL and examine uses of formal parameters of the caller
2116 FBI->node (described by FBI->info). FBI also holds intermediate
2117 information about each formal parameter. Currently it checks
2118 whether the call calls a pointer that is a formal parameter and if so, the
2119 parameter is marked with the called flag and an indirect call graph edge
2120 describing the call is created. This is very simple for ordinary pointers
2121 represented in SSA but not-so-nice when it comes to member pointers. The
2122 ugly part of this function does nothing more than trying to match the
2123 pattern of such a call. An example of such a pattern is the gimple dump
2124 below, the call is on the last line:
2125
2126 <bb 2>:
2127 f$__delta_5 = f.__delta;
2128 f$__pfn_24 = f.__pfn;
2129
2130 or
2131 <bb 2>:
2132 f$__delta_5 = MEM[(struct *)&f];
2133 f$__pfn_24 = MEM[(struct *)&f + 4B];
2134
2135 and a few lines below:
2136
2137 <bb 5>
2138 D.2496_3 = (int) f$__pfn_24;
2139 D.2497_4 = D.2496_3 & 1;
2140 if (D.2497_4 != 0)
2141 goto <bb 3>;
2142 else
2143 goto <bb 4>;
2144
2145 <bb 6>:
2146 D.2500_7 = (unsigned int) f$__delta_5;
2147 D.2501_8 = &S + D.2500_7;
2148 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2149 D.2503_10 = *D.2502_9;
2150 D.2504_12 = f$__pfn_24 + -1;
2151 D.2505_13 = (unsigned int) D.2504_12;
2152 D.2506_14 = D.2503_10 + D.2505_13;
2153 D.2507_15 = *D.2506_14;
2154 iftmp.11_16 = (String:: *) D.2507_15;
2155
2156 <bb 7>:
2157 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2158 D.2500_19 = (unsigned int) f$__delta_5;
2159 D.2508_20 = &S + D.2500_19;
2160 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2161
2162 Such patterns are results of simple calls to a member pointer:
2163
2164 int doprinting (int (MyString::* f)(int) const)
2165 {
2166 MyString S ("somestring");
2167
2168 return (S.*f)(4);
2169 }
2170
2171 Moreover, the function also looks for called pointers loaded from aggregates
2172 passed by value or reference. */
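/* An example of the latter is a call such as ptr->callback (x) where ptr is
   a pointer parameter; the load of the callback field is recognized by
   ipa_load_from_parm_agg_1 below.  */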
2173
2174 static void
2175 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2176 tree target)
2177 {
2178 struct ipa_node_params *info = fbi->info;
2179 HOST_WIDE_INT offset;
2180 bool by_ref;
2181
2182 if (SSA_NAME_IS_DEFAULT_DEF (target))
2183 {
2184 tree var = SSA_NAME_VAR (target);
2185 int index = ipa_get_param_decl_index (info, var);
2186 if (index >= 0)
2187 ipa_note_param_call (fbi->node, index, call);
2188 return;
2189 }
2190
2191 int index;
2192 gimple def = SSA_NAME_DEF_STMT (target);
2193 if (gimple_assign_single_p (def)
2194 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2195 gimple_assign_rhs1 (def), &index, &offset,
2196 NULL, &by_ref))
2197 {
2198 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2199 cs->indirect_info->offset = offset;
2200 cs->indirect_info->agg_contents = 1;
2201 cs->indirect_info->by_ref = by_ref;
2202 return;
2203 }
2204
2205 /* Now we need to try to match the complex pattern of calling a member
2206 pointer. */
2207 if (gimple_code (def) != GIMPLE_PHI
2208 || gimple_phi_num_args (def) != 2
2209 || !POINTER_TYPE_P (TREE_TYPE (target))
2210 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2211 return;
2212
2213 /* First, we need to check whether one of these is a load from a member
2214 pointer that is a parameter to this function. */
2215 tree n1 = PHI_ARG_DEF (def, 0);
2216 tree n2 = PHI_ARG_DEF (def, 1);
2217 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2218 return;
2219 gimple d1 = SSA_NAME_DEF_STMT (n1);
2220 gimple d2 = SSA_NAME_DEF_STMT (n2);
2221
2222 tree rec;
2223 basic_block bb, virt_bb;
2224 basic_block join = gimple_bb (def);
2225 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2226 {
2227 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2228 return;
2229
2230 bb = EDGE_PRED (join, 0)->src;
2231 virt_bb = gimple_bb (d2);
2232 }
2233 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2234 {
2235 bb = EDGE_PRED (join, 1)->src;
2236 virt_bb = gimple_bb (d1);
2237 }
2238 else
2239 return;
2240
2241 /* Second, we need to check that the basic blocks are laid out in the way
2242 corresponding to the pattern. */
2243
2244 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2245 || single_pred (virt_bb) != bb
2246 || single_succ (virt_bb) != join)
2247 return;
2248
2249 /* Third, let's see that the branching is done depending on the least
2250 significant bit of the pfn. */
2251
2252 gimple branch = last_stmt (bb);
2253 if (!branch || gimple_code (branch) != GIMPLE_COND)
2254 return;
2255
2256 if ((gimple_cond_code (branch) != NE_EXPR
2257 && gimple_cond_code (branch) != EQ_EXPR)
2258 || !integer_zerop (gimple_cond_rhs (branch)))
2259 return;
2260
2261 tree cond = gimple_cond_lhs (branch);
2262 if (!ipa_is_ssa_with_stmt_def (cond))
2263 return;
2264
2265 def = SSA_NAME_DEF_STMT (cond);
2266 if (!is_gimple_assign (def)
2267 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2268 || !integer_onep (gimple_assign_rhs2 (def)))
2269 return;
2270
2271 cond = gimple_assign_rhs1 (def);
2272 if (!ipa_is_ssa_with_stmt_def (cond))
2273 return;
2274
2275 def = SSA_NAME_DEF_STMT (cond);
2276
2277 if (is_gimple_assign (def)
2278 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2279 {
2280 cond = gimple_assign_rhs1 (def);
2281 if (!ipa_is_ssa_with_stmt_def (cond))
2282 return;
2283 def = SSA_NAME_DEF_STMT (cond);
2284 }
2285
2286 tree rec2;
2287 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2288 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2289 == ptrmemfunc_vbit_in_delta),
2290 NULL);
2291 if (rec != rec2)
2292 return;
2293
2294 index = ipa_get_param_decl_index (info, rec);
2295 if (index >= 0
2296 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2297 {
2298 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2299 cs->indirect_info->offset = offset;
2300 cs->indirect_info->agg_contents = 1;
2301 cs->indirect_info->member_ptr = 1;
2302 }
2303
2304 return;
2305 }
2306
2307 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2308 object referenced in the expression is a formal parameter of the caller
2309 FBI->node (described by FBI->info), create a call note for the
2310 statement. */
2311
2312 static void
2313 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2314 gimple call, tree target)
2315 {
2316 tree obj = OBJ_TYPE_REF_OBJECT (target);
2317 int index;
2318 HOST_WIDE_INT anc_offset;
2319
2320 if (!flag_devirtualize)
2321 return;
2322
2323 if (TREE_CODE (obj) != SSA_NAME)
2324 return;
2325
2326 struct ipa_node_params *info = fbi->info;
2327 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2328 {
2329 struct ipa_jump_func jfunc;
2330 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2331 return;
2332
2333 anc_offset = 0;
2334 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2335 gcc_assert (index >= 0);
2336 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2337 call, &jfunc))
2338 return;
2339 }
2340 else
2341 {
2342 struct ipa_jump_func jfunc;
2343 gimple stmt = SSA_NAME_DEF_STMT (obj);
2344 tree expr;
2345
2346 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2347 if (!expr)
2348 return;
2349 index = ipa_get_param_decl_index (info,
2350 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2351 gcc_assert (index >= 0);
2352 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2353 call, &jfunc, anc_offset))
2354 return;
2355 }
2356
2357 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2358 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2359 ii->offset = anc_offset;
2360 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2361 ii->otr_type = obj_type_ref_class (target);
2362 ii->polymorphic = 1;
2363 }
2364
2365 /* Analyze a call statement CALL to determine whether and how it utilizes
2366 formal parameters of the caller (described by FBI->info). FBI also holds
2367 intermediate information about each formal parameter. */
2368
2369 static void
2370 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2371 {
2372 tree target = gimple_call_fn (call);
2373
2374 if (!target
2375 || (TREE_CODE (target) != SSA_NAME
2376 && !virtual_method_call_p (target)))
2377 return;
2378
2379 struct cgraph_edge *cs = fbi->node->get_edge (call);
2380 /* If we previously turned the call into a direct call, there is
2381 no need to analyze. */
2382 if (cs && !cs->indirect_unknown_callee)
2383 return;
2384
2385 if (cs->indirect_info->polymorphic)
2386 {
2387 tree instance;
2388 tree target = gimple_call_fn (call);
2389 ipa_polymorphic_call_context context (current_function_decl,
2390 target, call, &instance);
2391
2392 gcc_checking_assert (cs->indirect_info->otr_type
2393 == obj_type_ref_class (target));
2394 gcc_checking_assert (cs->indirect_info->otr_token
2395 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2396
2397 cs->indirect_info->vptr_changed
2398 = !context.get_dynamic_type (instance,
2399 OBJ_TYPE_REF_OBJECT (target),
2400 obj_type_ref_class (target), call);
2401 cs->indirect_info->context = context;
2402 }
2403
2404 if (TREE_CODE (target) == SSA_NAME)
2405 ipa_analyze_indirect_call_uses (fbi, call, target);
2406 else if (virtual_method_call_p (target))
2407 ipa_analyze_virtual_call_uses (fbi, call, target);
2408 }
2409
2410
2411 /* Analyze the call statement STMT with respect to formal parameters
2412 (described in FBI->info) of the caller given by FBI->node. Currently it
2413 only checks whether formal parameters are called. */
2414
2415 static void
2416 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2417 {
2418 if (is_gimple_call (stmt))
2419 ipa_analyze_call_uses (fbi, stmt);
2420 }
2421
2422 /* Callback of walk_stmt_load_store_addr_ops, used as the visit_load,
2423 visit_store and visit_addr hook. If OP is a parameter declaration, mark
2424 it as used in the info structure passed in DATA. */
2425
2426 static bool
2427 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2428 {
2429 struct ipa_node_params *info = (struct ipa_node_params *) data;
2430
2431 op = get_base_address (op);
2432 if (op
2433 && TREE_CODE (op) == PARM_DECL)
2434 {
2435 int index = ipa_get_param_decl_index (info, op);
2436 gcc_assert (index >= 0);
2437 ipa_set_param_used (info, index, true);
2438 }
2439
2440 return false;
2441 }
2442
2443 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2444 the findings in various structures of the associated ipa_node_params
2445 structure, such as parameter flags, notes etc. FBI holds various data about
2446 the function being analyzed. */
2447
2448 static void
2449 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2450 {
2451 gimple_stmt_iterator gsi;
2452 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2453 {
2454 gimple stmt = gsi_stmt (gsi);
2455
2456 if (is_gimple_debug (stmt))
2457 continue;
2458
2459 ipa_analyze_stmt_uses (fbi, stmt);
2460 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2461 visit_ref_for_mod_analysis,
2462 visit_ref_for_mod_analysis,
2463 visit_ref_for_mod_analysis);
2464 }
2465 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2466 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2467 visit_ref_for_mod_analysis,
2468 visit_ref_for_mod_analysis,
2469 visit_ref_for_mod_analysis);
2470 }
2471
2472 /* Calculate controlled uses of parameters of NODE. */
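/* A use is counted as controlled when the SSA default definition of the
   parameter appears only in call (and debug) statements, so that every use
   is described by a jump function; any other use, as well as a parameter
   not in SSA form, yields IPA_UNDESCRIBED_USE.  */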
2473
2474 static void
2475 ipa_analyze_controlled_uses (struct cgraph_node *node)
2476 {
2477 struct ipa_node_params *info = IPA_NODE_REF (node);
2478
2479 for (int i = 0; i < ipa_get_param_count (info); i++)
2480 {
2481 tree parm = ipa_get_param (info, i);
2482 int controlled_uses = 0;
2483
2484 /* For SSA regs see if the parameter is used. For non-SSA parameters
2485 we compute the flag during modification analysis. */
2486 if (is_gimple_reg (parm))
2487 {
2488 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2489 parm);
2490 if (ddef && !has_zero_uses (ddef))
2491 {
2492 imm_use_iterator imm_iter;
2493 use_operand_p use_p;
2494
2495 ipa_set_param_used (info, i, true);
2496 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2497 if (!is_gimple_call (USE_STMT (use_p)))
2498 {
2499 if (!is_gimple_debug (USE_STMT (use_p)))
2500 {
2501 controlled_uses = IPA_UNDESCRIBED_USE;
2502 break;
2503 }
2504 }
2505 else
2506 controlled_uses++;
2507 }
2508 else
2509 controlled_uses = 0;
2510 }
2511 else
2512 controlled_uses = IPA_UNDESCRIBED_USE;
2513 ipa_set_controlled_uses (info, i, controlled_uses);
2514 }
2515 }
2516
2517 /* Free stuff in BI. */
2518
2519 static void
2520 free_ipa_bb_info (struct ipa_bb_info *bi)
2521 {
2522 bi->cg_edges.release ();
2523 bi->param_aa_statuses.release ();
2524 }
2525
2526 /* Dominator walker driving the analysis. */
2527
2528 class analysis_dom_walker : public dom_walker
2529 {
2530 public:
2531 analysis_dom_walker (struct func_body_info *fbi)
2532 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2533
2534 virtual void before_dom_children (basic_block);
2535
2536 private:
2537 struct func_body_info *m_fbi;
2538 };
2539
2540 void
2541 analysis_dom_walker::before_dom_children (basic_block bb)
2542 {
2543 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2544 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2545 }
2546
2547 /* Initialize the array describing properties of formal parameters
2548 of NODE, analyze their uses and compute jump functions associated
2549 with actual arguments of calls from within NODE. */
2550
2551 void
2552 ipa_analyze_node (struct cgraph_node *node)
2553 {
2554 struct func_body_info fbi;
2555 struct ipa_node_params *info;
2556
2557 ipa_check_create_node_params ();
2558 ipa_check_create_edge_args ();
2559 info = IPA_NODE_REF (node);
2560
2561 if (info->analysis_done)
2562 return;
2563 info->analysis_done = 1;
2564
2565 if (ipa_func_spec_opts_forbid_analysis_p (node))
2566 {
2567 for (int i = 0; i < ipa_get_param_count (info); i++)
2568 {
2569 ipa_set_param_used (info, i, true);
2570 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2571 }
2572 return;
2573 }
2574
2575 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2576 push_cfun (func);
2577 calculate_dominance_info (CDI_DOMINATORS);
2578 ipa_initialize_node_params (node);
2579 ipa_analyze_controlled_uses (node);
2580
2581 fbi.node = node;
2582 fbi.info = IPA_NODE_REF (node);
2583 fbi.bb_infos = vNULL;
2584 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2585 fbi.param_count = ipa_get_param_count (info);
2586 fbi.aa_walked = 0;
2587
2588 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2589 {
2590 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2591 bi->cg_edges.safe_push (cs);
2592 }
2593
2594 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2595 {
2596 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2597 bi->cg_edges.safe_push (cs);
2598 }
2599
2600 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2601
2602 int i;
2603 struct ipa_bb_info *bi;
2604 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2605 free_ipa_bb_info (bi);
2606 fbi.bb_infos.release ();
2607 free_dominance_info (CDI_DOMINATORS);
2608 pop_cfun ();
2609 }
2610
2611 /* Update the jump function DST when the call graph edge corresponding to SRC
2612 is being inlined, knowing that DST is of type ancestor and SRC of known
2613 type. */
2614
2615 static void
2616 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2617 struct ipa_jump_func *dst)
2618 {
2619 HOST_WIDE_INT combined_offset;
2620 tree combined_type;
2621
2622 if (!ipa_get_jf_ancestor_type_preserved (dst))
2623 {
2624 dst->type = IPA_JF_UNKNOWN;
2625 return;
2626 }
2627
2628 combined_offset = ipa_get_jf_known_type_offset (src)
2629 + ipa_get_jf_ancestor_offset (dst);
2630 combined_type = ipa_get_jf_ancestor_type (dst);
2631
2632 if (combined_type)
2633 ipa_set_jf_known_type (dst, combined_offset,
2634 ipa_get_jf_known_type_base_type (src),
2635 combined_type);
2636 else
2637 dst->type = IPA_JF_UNKNOWN;
2638 }
2639
2640 /* Update the jump functions associated with call graph edge E when the call
2641 graph edge CS is being inlined, assuming that E->caller is already (possibly
2642 indirectly) inlined into CS->callee and that E has not been inlined. */
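/* For example, if the jump function of E is a simple pass-through of formal 2
   of the inlined callee and CS passed its own formal 5 through as that
   argument, the combined jump function becomes a pass-through of formal 5 of
   the new root, with the aggregate and type preservation flags ANDed
   together.  */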
2643
2644 static void
2645 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2646 struct cgraph_edge *e)
2647 {
2648 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2649 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2650 int count = ipa_get_cs_argument_count (args);
2651 int i;
2652
2653 for (i = 0; i < count; i++)
2654 {
2655 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2656 struct ipa_polymorphic_call_context *dst_ctx
2657 = ipa_get_ith_polymorhic_call_context (args, i);
2658
2659 if (dst->type == IPA_JF_ANCESTOR)
2660 {
2661 struct ipa_jump_func *src;
2662 int dst_fid = dst->value.ancestor.formal_id;
2663 struct ipa_polymorphic_call_context *src_ctx
2664 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2665
2666 /* A variable number of arguments can cause havoc if we try to access
2667 an argument that does not exist on the inlined edge. So make sure
2668 we don't. */
2669 if (dst_fid >= ipa_get_cs_argument_count (top))
2670 {
2671 dst->type = IPA_JF_UNKNOWN;
2672 continue;
2673 }
2674
2675 src = ipa_get_ith_jump_func (top, dst_fid);
2676
2677 if (src_ctx && !src_ctx->useless_p ())
2678 {
2679 struct ipa_polymorphic_call_context ctx = *src_ctx;
2680
2681 /* TODO: Make type preserved safe WRT contexts. */
2682 if (!ipa_get_jf_ancestor_type_preserved (dst))
2683 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2684 ctx.offset_by (dst->value.ancestor.offset);
2685 if (!ctx.useless_p ())
2686 {
2687 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2688 count);
2689 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2690 }
2691 dst_ctx->combine_with (ctx);
2692 }
2693
2694 if (src->agg.items
2695 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2696 {
2697 struct ipa_agg_jf_item *item;
2698 int j;
2699
2700 /* Currently we do not produce clobber aggregate jump functions,
2701 replace with merging when we do. */
2702 gcc_assert (!dst->agg.items);
2703
2704 dst->agg.items = vec_safe_copy (src->agg.items);
2705 dst->agg.by_ref = src->agg.by_ref;
2706 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2707 item->offset -= dst->value.ancestor.offset;
2708 }
2709
2710 if (src->type == IPA_JF_KNOWN_TYPE)
2711 combine_known_type_and_ancestor_jfs (src, dst);
2712 else if (src->type == IPA_JF_PASS_THROUGH
2713 && src->value.pass_through.operation == NOP_EXPR)
2714 {
2715 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2716 dst->value.ancestor.agg_preserved &=
2717 src->value.pass_through.agg_preserved;
2718 dst->value.ancestor.type_preserved &=
2719 src->value.pass_through.type_preserved;
2720 }
2721 else if (src->type == IPA_JF_ANCESTOR)
2722 {
2723 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2724 dst->value.ancestor.offset += src->value.ancestor.offset;
2725 dst->value.ancestor.agg_preserved &=
2726 src->value.ancestor.agg_preserved;
2727 dst->value.ancestor.type_preserved &=
2728 src->value.ancestor.type_preserved;
2729 }
2730 else
2731 dst->type = IPA_JF_UNKNOWN;
2732 }
2733 else if (dst->type == IPA_JF_PASS_THROUGH)
2734 {
2735 struct ipa_jump_func *src;
2736 /* We must check the range due to calls with a variable number of arguments,
2737 and we cannot combine jump functions with operations. */
2738 if (dst->value.pass_through.operation == NOP_EXPR
2739 && (dst->value.pass_through.formal_id
2740 < ipa_get_cs_argument_count (top)))
2741 {
2742 int dst_fid = dst->value.pass_through.formal_id;
2743 src = ipa_get_ith_jump_func (top, dst_fid);
2744 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2745 struct ipa_polymorphic_call_context *src_ctx
2746 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2747
2748 if (src_ctx && !src_ctx->useless_p ())
2749 {
2750 struct ipa_polymorphic_call_context ctx = *src_ctx;
2751
2752 /* TODO: Make type preserved safe WRT contexts. */
2753 if (!ipa_get_jf_pass_through_type_preserved (dst))
2754 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2755 if (!ctx.useless_p ())
2756 {
2757 if (!dst_ctx)
2758 {
2759 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2760 count);
2761 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2762 }
2763 dst_ctx->combine_with (ctx);
2764 }
2765 }
2766 switch (src->type)
2767 {
2768 case IPA_JF_UNKNOWN:
2769 dst->type = IPA_JF_UNKNOWN;
2770 break;
2771 case IPA_JF_KNOWN_TYPE:
2772 if (ipa_get_jf_pass_through_type_preserved (dst))
2773 ipa_set_jf_known_type (dst,
2774 ipa_get_jf_known_type_offset (src),
2775 ipa_get_jf_known_type_base_type (src),
2776 ipa_get_jf_known_type_component_type (src));
2777 else
2778 dst->type = IPA_JF_UNKNOWN;
2779 break;
2780 case IPA_JF_CONST:
2781 ipa_set_jf_cst_copy (dst, src);
2782 break;
2783
2784 case IPA_JF_PASS_THROUGH:
2785 {
2786 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2787 enum tree_code operation;
2788 operation = ipa_get_jf_pass_through_operation (src);
2789
2790 if (operation == NOP_EXPR)
2791 {
2792 bool agg_p, type_p;
2793 agg_p = dst_agg_p
2794 && ipa_get_jf_pass_through_agg_preserved (src);
2795 type_p = ipa_get_jf_pass_through_type_preserved (src)
2796 && ipa_get_jf_pass_through_type_preserved (dst);
2797 ipa_set_jf_simple_pass_through (dst, formal_id,
2798 agg_p, type_p);
2799 }
2800 else
2801 {
2802 tree operand = ipa_get_jf_pass_through_operand (src);
2803 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2804 operation);
2805 }
2806 break;
2807 }
2808 case IPA_JF_ANCESTOR:
2809 {
2810 bool agg_p, type_p;
2811 agg_p = dst_agg_p
2812 && ipa_get_jf_ancestor_agg_preserved (src);
2813 type_p = ipa_get_jf_ancestor_type_preserved (src)
2814 && ipa_get_jf_pass_through_type_preserved (dst);
2815 ipa_set_ancestor_jf (dst,
2816 ipa_get_jf_ancestor_offset (src),
2817 ipa_get_jf_ancestor_type (src),
2818 ipa_get_jf_ancestor_formal_id (src),
2819 agg_p, type_p);
2820 break;
2821 }
2822 default:
2823 gcc_unreachable ();
2824 }
2825
2826 if (src->agg.items
2827 && (dst_agg_p || !src->agg.by_ref))
2828 {
2829 /* Currently we do not produce clobber aggregate jump
2830 functions, replace with merging when we do. */
2831 gcc_assert (!dst->agg.items);
2832
2833 dst->agg.by_ref = src->agg.by_ref;
2834 dst->agg.items = vec_safe_copy (src->agg.items);
2835 }
2836 }
2837 else
2838 dst->type = IPA_JF_UNKNOWN;
2839 }
2840 }
2841 }
2842
2843 /* If TARGET is an addr_expr of a function declaration, make it the
2844 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2845 Otherwise, return NULL. */
2846
2847 struct cgraph_edge *
2848 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2849 bool speculative)
2850 {
2851 struct cgraph_node *callee;
2852 struct inline_edge_summary *es = inline_edge_summary (ie);
2853 bool unreachable = false;
2854
2855 if (TREE_CODE (target) == ADDR_EXPR)
2856 target = TREE_OPERAND (target, 0);
2857 if (TREE_CODE (target) != FUNCTION_DECL)
2858 {
2859 target = canonicalize_constructor_val (target, NULL);
2860 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2861 {
2862 if (ie->indirect_info->member_ptr)
2863 /* Member pointer call that goes through a VMT lookup. */
2864 return NULL;
2865
2866 if (dump_enabled_p ())
2867 {
2868 location_t loc = gimple_location_safe (ie->call_stmt);
2869 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2870 "discovered direct call to non-function in %s/%i, "
2871 "making it __builtin_unreachable\n",
2872 ie->caller->name (), ie->caller->order);
2873 }
2874
2875 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2876 callee = cgraph_node::get_create (target);
2877 unreachable = true;
2878 }
2879 else
2880 callee = cgraph_node::get (target);
2881 }
2882 else
2883 callee = cgraph_node::get (target);
2884
2885 /* Because may-edges are not explicitly represented and the vtable may be
2886 external, we may create the first reference to the object in the unit. */
2887 if (!callee || callee->global.inlined_to)
2888 {
2889
2890 /* We had better make sure we can refer to it.
2891 In the case of static functions we are out of luck, since we have
2892 already removed the body. In the case of public functions we may or
2893 may not introduce the reference. */
2894 if (!canonicalize_constructor_val (target, NULL)
2895 || !TREE_PUBLIC (target))
2896 {
2897 if (dump_file)
2898 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2899 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2900 xstrdup (ie->caller->name ()),
2901 ie->caller->order,
2902 xstrdup (ie->callee->name ()),
2903 ie->callee->order);
2904 return NULL;
2905 }
2906 callee = cgraph_node::get_create (target);
2907 }
2908
2909 /* If the edge is already speculative, check whether it agrees with the new target. */
2910 if (speculative && ie->speculative)
2911 {
2912 struct cgraph_edge *e2;
2913 struct ipa_ref *ref;
2914 ie->speculative_call_info (e2, ie, ref);
2915 if (e2->callee->ultimate_alias_target ()
2916 != callee->ultimate_alias_target ())
2917 {
2918 if (dump_file)
2919 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2920 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2921 xstrdup (ie->caller->name ()),
2922 ie->caller->order,
2923 xstrdup (callee->name ()),
2924 callee->order,
2925 xstrdup (e2->callee->name ()),
2926 e2->callee->order);
2927 }
2928 else
2929 {
2930 if (dump_file)
2931 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2932 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2933 xstrdup (ie->caller->name ()),
2934 ie->caller->order,
2935 xstrdup (callee->name ()),
2936 callee->order);
2937 }
2938 return NULL;
2939 }
2940
2941 if (!dbg_cnt (devirt))
2942 return NULL;
2943
2944 ipa_check_create_node_params ();
2945
2946 /* We cannot make edges to inline clones. It is a bug if someone removed
2947 the cgraph node too early. */
2948 gcc_assert (!callee->global.inlined_to);
2949
2950 if (dump_file && !unreachable)
2951 {
2952 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2953 "(%s/%i -> %s/%i), for stmt ",
2954 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2955 speculative ? "speculative" : "known",
2956 xstrdup (ie->caller->name ()),
2957 ie->caller->order,
2958 xstrdup (callee->name ()),
2959 callee->order);
2960 if (ie->call_stmt)
2961 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2962 else
2963 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2964 }
2965 if (dump_enabled_p ())
2966 {
2967 location_t loc = gimple_location_safe (ie->call_stmt);
2968
2969 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2970 "converting indirect call in %s to direct call to %s\n",
2971 ie->caller->name (), callee->name ());
2972 }
2973 if (!speculative)
2974 ie = ie->make_direct (callee);
2975 else
2976 {
2977 if (!callee->can_be_discarded_p ())
2978 {
2979 cgraph_node *alias;
2980 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2981 if (alias)
2982 callee = alias;
2983 }
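/* The speculative edge is given 8/10 of the count and frequency of the
   original indirect edge.  */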
2984 ie = ie->make_speculative
2985 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2986 }
2987 es = inline_edge_summary (ie);
2988 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2989 - eni_size_weights.call_cost);
2990 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2991 - eni_time_weights.call_cost);
2992
2993 return ie;
2994 }
2995
2996 /* Retrieve a value from aggregate jump function AGG for the given OFFSET, or
2997 return NULL if there is none. BY_REF specifies whether the value has to
2998 be passed by reference or by value. */
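/* For instance, given an aggregate jump function that records constant 2 at
   bit offset 32 with by_ref set, a query with OFFSET 32 and BY_REF true
   returns that constant, whereas a query with BY_REF false returns NULL.  */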
2999
3000 tree
3001 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
3002 HOST_WIDE_INT offset, bool by_ref)
3003 {
3004 struct ipa_agg_jf_item *item;
3005 int i;
3006
3007 if (by_ref != agg->by_ref)
3008 return NULL;
3009
3010 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3011 if (item->offset == offset)
3012 {
3013 /* Currently we do not have clobber values, return NULL for them once
3014 we do. */
3015 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3016 return item->value;
3017 }
3018 return NULL;
3019 }
3020
3021 /* Remove a reference to SYMBOL from the list of references of a node given by
3022 reference description RDESC. Return true if the reference has been
3023 successfully found and removed. */
3024
3025 static bool
3026 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3027 {
3028 struct ipa_ref *to_del;
3029 struct cgraph_edge *origin;
3030
3031 origin = rdesc->cs;
3032 if (!origin)
3033 return false;
3034 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3035 origin->lto_stmt_uid);
3036 if (!to_del)
3037 return false;
3038
3039 to_del->remove_reference ();
3040 if (dump_file)
3041 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3042 xstrdup (origin->caller->name ()),
3043 origin->caller->order, xstrdup (symbol->name ()));
3044 return true;
3045 }
3046
3047 /* If JFUNC has a reference description with refcount different from
3048 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3049 NULL. JFUNC must be a constant jump function. */
3050
3051 static struct ipa_cst_ref_desc *
3052 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3053 {
3054 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3055 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3056 return rdesc;
3057 else
3058 return NULL;
3059 }
3060
3061 /* If the value of constant jump function JFUNC is an address of a function
3062 declaration, return the associated call graph node. Otherwise return
3063 NULL. */
3064
3065 static cgraph_node *
3066 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3067 {
3068 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3069 tree cst = ipa_get_jf_constant (jfunc);
3070 if (TREE_CODE (cst) != ADDR_EXPR
3071 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3072 return NULL;
3073
3074 return cgraph_node::get (TREE_OPERAND (cst, 0));
3075 }
3076
3077
3078 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3079 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3080 the edge specified in the rdesc. Return false if either the symbol or the
3081 reference could not be found, otherwise return true. */
3082
3083 static bool
3084 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3085 {
3086 struct ipa_cst_ref_desc *rdesc;
3087 if (jfunc->type == IPA_JF_CONST
3088 && (rdesc = jfunc_rdesc_usable (jfunc))
3089 && --rdesc->refcount == 0)
3090 {
3091 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3092 if (!symbol)
3093 return false;
3094
3095 return remove_described_reference (symbol, rdesc);
3096 }
3097 return true;
3098 }
3099
3100 /* Try to find a destination for indirect edge IE that corresponds to a simple
3101 call or a call of a member function pointer and where the destination is a
3102 pointer formal parameter described by jump function JFUNC. If it can be
3103 determined, return the newly direct edge, otherwise return NULL.
3104 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3105
3106 static struct cgraph_edge *
3107 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3108 struct ipa_jump_func *jfunc,
3109 struct ipa_node_params *new_root_info)
3110 {
3111 struct cgraph_edge *cs;
3112 tree target;
3113 bool agg_contents = ie->indirect_info->agg_contents;
3114
3115 if (ie->indirect_info->agg_contents)
3116 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3117 ie->indirect_info->offset,
3118 ie->indirect_info->by_ref);
3119 else
3120 target = ipa_value_from_jfunc (new_root_info, jfunc);
3121 if (!target)
3122 return NULL;
3123 cs = ipa_make_edge_direct_to_target (ie, target);
3124
3125 if (cs && !agg_contents)
3126 {
3127 bool ok;
3128 gcc_checking_assert (cs->callee
3129 && (cs != ie
3130 || jfunc->type != IPA_JF_CONST
3131 || !cgraph_node_for_jfunc (jfunc)
3132 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3133 ok = try_decrement_rdesc_refcount (jfunc);
3134 gcc_checking_assert (ok);
3135 }
3136
3137 return cs;
3138 }
3139
3140 /* Return the target to be used in cases of impossible devirtualization. IE
3141 and target (the latter can be NULL) are dumped when dumping is enabled. */
3142
3143 tree
3144 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3145 {
3146 if (dump_file)
3147 {
3148 if (target)
3149 fprintf (dump_file,
3150 "Type inconsistent devirtualization: %s/%i->%s\n",
3151 ie->caller->name (), ie->caller->order,
3152 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3153 else
3154 fprintf (dump_file,
3155 "No devirtualization target in %s/%i\n",
3156 ie->caller->name (), ie->caller->order);
3157 }
3158 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3159 cgraph_node::get_create (new_target);
3160 return new_target;
3161 }
3162
3163 /* Try to find a destination for indirect edge IE that corresponds to a
3164 virtual call based on a formal parameter which is described by jump
3165 function JFUNC, and if it can be determined, make the edge direct and
3166 return it. Otherwise, return NULL. CTX describes the polymorphic context
3167 that the parameter on which the call is based brings along with it. */
3168
3169 static struct cgraph_edge *
3170 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3171 struct ipa_jump_func *jfunc,
3172 struct ipa_polymorphic_call_context ctx)
3173 {
3174 tree target = NULL;
3175 bool speculative = false;
3176
3177 if (!flag_devirtualize)
3178 return NULL;
3179
3180 gcc_assert (!ie->indirect_info->by_ref);
3181
3182 /* Try to do lookup via known virtual table pointer value. */
3183 if (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively)
3184 {
3185 tree vtable;
3186 unsigned HOST_WIDE_INT offset;
3187 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3188 ie->indirect_info->offset,
3189 true);
3190 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3191 {
3192 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3193 vtable, offset);
3194 if (t)
3195 {
3196 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3197 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3198 || !possible_polymorphic_call_target_p
3199 (ie, cgraph_node::get (t)))
3200 {
3201 /* Do not speculate builtin_unreachable, it is pointless. */
3202 if (!ie->indirect_info->vptr_changed)
3203 target = ipa_impossible_devirt_target (ie, target);
3204 }
3205 else
3206 {
3207 target = t;
3208 speculative = ie->indirect_info->vptr_changed;
3209 }
3210 }
3211 }
3212 }
3213
3214 ipa_polymorphic_call_context ie_context (ie);
3215 vec <cgraph_node *>targets;
3216 bool final;
3217
3218 ctx.offset_by (ie->indirect_info->offset);
3219 if (ie->indirect_info->vptr_changed)
3220 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3221 ie->indirect_info->otr_type);
3222 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3223 targets = possible_polymorphic_call_targets
3224 (ie->indirect_info->otr_type,
3225 ie->indirect_info->otr_token,
3226 ctx, &final);
3227 if (final && targets.length () <= 1)
3228 {
3229 if (targets.length () == 1)
3230 target = targets[0]->decl;
3231 else
3232 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3233 }
3234 else if (!target && flag_devirtualize_speculatively
3235 && !ie->speculative && ie->maybe_hot_p ())
3236 {
3237 cgraph_node *n;
3238 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3239 ie->indirect_info->otr_token,
3240 ie->indirect_info->context);
3241 if (n)
3242 {
3243 target = n->decl;
3244 speculative = true;
3245 }
3246 }
3247
3248 if (target)
3249 {
3250 if (!possible_polymorphic_call_target_p
3251 (ie, cgraph_node::get_create (target)))
3252 {
3253 if (speculative)
3254 return NULL;
3255 target = ipa_impossible_devirt_target (ie, target);
3256 }
3257 return ipa_make_edge_direct_to_target (ie, target, speculative);
3258 }
3259 else
3260 return NULL;
3261 }
3262
3263 /* Update the param called notes associated with NODE when CS is being inlined,
3264 assuming NODE is (potentially indirectly) inlined into CS->callee.
3265 Moreover, if the callee is discovered to be constant, create a new cgraph
3266 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3267 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3268
3269 static bool
3270 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3271 struct cgraph_node *node,
3272 vec<cgraph_edge *> *new_edges)
3273 {
3274 struct ipa_edge_args *top;
3275 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3276 struct ipa_node_params *new_root_info;
3277 bool res = false;
3278
3279 ipa_check_create_edge_args ();
3280 top = IPA_EDGE_REF (cs);
3281 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3282 ? cs->caller->global.inlined_to
3283 : cs->caller);
3284
3285 for (ie = node->indirect_calls; ie; ie = next_ie)
3286 {
3287 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3288 struct ipa_jump_func *jfunc;
3289 int param_index;
3290
3291 next_ie = ie->next_callee;
3292
3293 if (ici->param_index == -1)
3294 continue;
3295
3296 /* We must check the range due to calls with a variable number of arguments: */
3297 if (ici->param_index >= ipa_get_cs_argument_count (top))
3298 {
3299 ici->param_index = -1;
3300 continue;
3301 }
3302
3303 param_index = ici->param_index;
3304 jfunc = ipa_get_ith_jump_func (top, param_index);
3305
3306 if (!flag_indirect_inlining)
3307 new_direct_edge = NULL;
3308 else if (ici->polymorphic)
3309 {
3310 ipa_polymorphic_call_context ctx;
3311 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3312 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3313 }
3314 else
3315 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3316 new_root_info);
3317 /* If speculation was removed, then we need to do nothing. */
3318 if (new_direct_edge && new_direct_edge != ie)
3319 {
3320 new_direct_edge->indirect_inlining_edge = 1;
3321 top = IPA_EDGE_REF (cs);
3322 res = true;
3323 }
3324 else if (new_direct_edge)
3325 {
3326 new_direct_edge->indirect_inlining_edge = 1;
3327 if (new_direct_edge->call_stmt)
3328 new_direct_edge->call_stmt_cannot_inline_p
3329 = !gimple_check_call_matching_types (
3330 new_direct_edge->call_stmt,
3331 new_direct_edge->callee->decl, false);
3332 if (new_edges)
3333 {
3334 new_edges->safe_push (new_direct_edge);
3335 res = true;
3336 }
3337 top = IPA_EDGE_REF (cs);
3338 }
3339 else if (jfunc->type == IPA_JF_PASS_THROUGH
3340 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3341 {
3342 if ((ici->agg_contents
3343 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3344 || (ici->polymorphic
3345 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3346 ici->param_index = -1;
3347 else
3348 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3349 }
3350 else if (jfunc->type == IPA_JF_ANCESTOR)
3351 {
3352 if ((ici->agg_contents
3353 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3354 || (ici->polymorphic
3355 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3356 ici->param_index = -1;
3357 else
3358 {
3359 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3360 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3361 }
3362 }
3363 else
3364 /* Either we can find a destination for this edge now or never. */
3365 ici->param_index = -1;
3366 }
3367
3368 return res;
3369 }
3370
3371 /* Recursively traverse the subtree of NODE (including NODE) made of inlined
3372 cgraph_edges when CS has been inlined and invoke
3373 update_indirect_edges_after_inlining on all nodes and
3374 update_jump_functions_after_inlining on all non-inlined edges that lead out
3375 of this subtree. Newly discovered indirect edges will be added to
3376 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3377 created. */
3378
3379 static bool
3380 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3381 struct cgraph_node *node,
3382 vec<cgraph_edge *> *new_edges)
3383 {
3384 struct cgraph_edge *e;
3385 bool res;
3386
3387 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3388
3389 for (e = node->callees; e; e = e->next_callee)
3390 if (!e->inline_failed)
3391 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3392 else
3393 update_jump_functions_after_inlining (cs, e);
3394 for (e = node->indirect_calls; e; e = e->next_callee)
3395 update_jump_functions_after_inlining (cs, e);
3396
3397 return res;
3398 }
3399
3400 /* Combine two controlled uses counts as done during inlining. */
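/* For instance, if a constant had C = 3 described uses in the caller and the
   corresponding formal parameter of the inlined callee had D = 2 described
   uses, the call argument that connected them disappears with the inlining,
   so 3 + 2 - 1 = 4 described uses remain.  */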
3401
3402 static int
3403 combine_controlled_uses_counters (int c, int d)
3404 {
3405 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3406 return IPA_UNDESCRIBED_USE;
3407 else
3408 return c + d - 1;
3409 }
3410
3411 /* Propagate the number of controlled users from CS->callee to the new root
3412 of the tree of inlined nodes. */
3413
3414 static void
3415 propagate_controlled_uses (struct cgraph_edge *cs)
3416 {
3417 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3418 struct cgraph_node *new_root = cs->caller->global.inlined_to
3419 ? cs->caller->global.inlined_to : cs->caller;
3420 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3421 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3422 int count, i;
3423
3424 count = MIN (ipa_get_cs_argument_count (args),
3425 ipa_get_param_count (old_root_info));
3426 for (i = 0; i < count; i++)
3427 {
3428 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3429 struct ipa_cst_ref_desc *rdesc;
3430
3431 if (jf->type == IPA_JF_PASS_THROUGH)
3432 {
3433 int src_idx, c, d;
3434 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3435 c = ipa_get_controlled_uses (new_root_info, src_idx);
3436 d = ipa_get_controlled_uses (old_root_info, i);
3437
3438 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3439 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3440 c = combine_controlled_uses_counters (c, d);
3441 ipa_set_controlled_uses (new_root_info, src_idx, c);
3442 if (c == 0 && new_root_info->ipcp_orig_node)
3443 {
3444 struct cgraph_node *n;
3445 struct ipa_ref *ref;
3446 tree t = new_root_info->known_csts[src_idx];
3447
3448 if (t && TREE_CODE (t) == ADDR_EXPR
3449 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3450 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3451 && (ref = new_root->find_reference (n, NULL, 0)))
3452 {
3453 if (dump_file)
3454 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3455 "reference from %s/%i to %s/%i.\n",
3456 xstrdup (new_root->name ()),
3457 new_root->order,
3458 xstrdup (n->name ()), n->order);
3459 ref->remove_reference ();
3460 }
3461 }
3462 }
3463 else if (jf->type == IPA_JF_CONST
3464 && (rdesc = jfunc_rdesc_usable (jf)))
3465 {
3466 int d = ipa_get_controlled_uses (old_root_info, i);
3467 int c = rdesc->refcount;
3468 rdesc->refcount = combine_controlled_uses_counters (c, d);
3469 if (rdesc->refcount == 0)
3470 {
3471 tree cst = ipa_get_jf_constant (jf);
3472 struct cgraph_node *n;
3473 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3474 && TREE_CODE (TREE_OPERAND (cst, 0))
3475 == FUNCTION_DECL);
3476 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3477 if (n)
3478 {
3479 struct cgraph_node *clone;
3480 bool ok;
3481 ok = remove_described_reference (n, rdesc);
3482 gcc_checking_assert (ok);
3483
3484 clone = cs->caller;
3485 while (clone->global.inlined_to
3486 && clone != rdesc->cs->caller
3487 && IPA_NODE_REF (clone)->ipcp_orig_node)
3488 {
3489 struct ipa_ref *ref;
3490 ref = clone->find_reference (n, NULL, 0);
3491 if (ref)
3492 {
3493 if (dump_file)
3494 fprintf (dump_file, "ipa-prop: Removing "
3495 "cloning-created reference "
3496 "from %s/%i to %s/%i.\n",
3497 xstrdup (clone->name ()),
3498 clone->order,
3499 xstrdup (n->name ()),
3500 n->order);
3501 ref->remove_reference ();
3502 }
3503 clone = clone->callers->caller;
3504 }
3505 }
3506 }
3507 }
3508 }
3509
3510 for (i = ipa_get_param_count (old_root_info);
3511 i < ipa_get_cs_argument_count (args);
3512 i++)
3513 {
3514 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3515
3516 if (jf->type == IPA_JF_CONST)
3517 {
3518 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3519 if (rdesc)
3520 rdesc->refcount = IPA_UNDESCRIBED_USE;
3521 }
3522 else if (jf->type == IPA_JF_PASS_THROUGH)
3523 ipa_set_controlled_uses (new_root_info,
3524 jf->value.pass_through.formal_id,
3525 IPA_UNDESCRIBED_USE);
3526 }
3527 }
3528
3529 /* Update jump functions and call note functions on inlining the call site CS.
3530 CS is expected to lead to a node already cloned by
3531 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3532 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges
3533 were created. */
3534
3535 bool
3536 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3537 vec<cgraph_edge *> *new_edges)
3538 {
3539 bool changed;
3540 /* Do nothing if the preparation phase has not been carried out yet
3541 (i.e. during early inlining). */
3542 if (!ipa_node_params_vector.exists ())
3543 return false;
3544 gcc_assert (ipa_edge_args_vector);
3545
3546 propagate_controlled_uses (cs);
3547 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3548
3549 return changed;
3550 }
3551
3552 /* Frees all dynamically allocated structures that the argument info points
3553 to. */
3554
3555 void
3556 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3557 {
3558 vec_free (args->jump_functions);
3559 memset (args, 0, sizeof (*args));
3560 }
3561
3562 /* Free all ipa_edge_args structures. */
3563
3564 void
3565 ipa_free_all_edge_args (void)
3566 {
3567 int i;
3568 struct ipa_edge_args *args;
3569
3570 if (!ipa_edge_args_vector)
3571 return;
3572
3573 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3574 ipa_free_edge_args_substructures (args);
3575
3576 vec_free (ipa_edge_args_vector);
3577 }
3578
3579 /* Frees all dynamically allocated structures that the param info points
3580 to. */
3581
3582 void
3583 ipa_free_node_params_substructures (struct ipa_node_params *info)
3584 {
3585 info->descriptors.release ();
3586 free (info->lattices);
3587 /* Lattice values and their sources are deallocated with their allocation
3588 pool. */
3589 info->known_csts.release ();
3590 info->known_contexts.release ();
3591 memset (info, 0, sizeof (*info));
3592 }
3593
3594 /* Free all ipa_node_params structures. */
3595
3596 void
3597 ipa_free_all_node_params (void)
3598 {
3599 int i;
3600 struct ipa_node_params *info;
3601
3602 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3603 ipa_free_node_params_substructures (info);
3604
3605 ipa_node_params_vector.release ();
3606 }
3607
3608 /* Set the aggregate replacements of NODE to be AGGVALS. */
3609
3610 void
3611 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3612 struct ipa_agg_replacement_value *aggvals)
3613 {
3614 if (vec_safe_length (ipa_node_agg_replacements)
3615 <= (unsigned) symtab->cgraph_max_uid)
3616 vec_safe_grow_cleared (ipa_node_agg_replacements,
3617 symtab->cgraph_max_uid + 1);
3618
3619 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3620 }
3621
3622 /* Hook that is called by cgraph.c when an edge is removed. */
3623
3624 static void
3625 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3626 {
3627 struct ipa_edge_args *args;
3628
3629 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3630 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3631 return;
3632
3633 args = IPA_EDGE_REF (cs);
3634 if (args->jump_functions)
3635 {
3636 struct ipa_jump_func *jf;
3637 int i;
3638 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3639 {
3640 struct ipa_cst_ref_desc *rdesc;
3641 try_decrement_rdesc_refcount (jf);
3642 if (jf->type == IPA_JF_CONST
3643 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3644 && rdesc->cs == cs)
3645 rdesc->cs = NULL;
3646 }
3647 }
3648
3649 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3650 }
3651
3652 /* Hook that is called by cgraph.c when a node is removed. */
3653
3654 static void
3655 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3656 {
3657 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3658 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3659 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3660 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3661 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3662 }
3663
3664 /* Hook that is called by cgraph.c when an edge is duplicated. */
3665
3666 static void
3667 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3668 __attribute__((unused)) void *data)
3669 {
3670 struct ipa_edge_args *old_args, *new_args;
3671 unsigned int i;
3672
3673 ipa_check_create_edge_args ();
3674
3675 old_args = IPA_EDGE_REF (src);
3676 new_args = IPA_EDGE_REF (dst);
3677
3678 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3679 if (old_args->polymorphic_call_contexts)
3680 new_args->polymorphic_call_contexts
3681 = vec_safe_copy (old_args->polymorphic_call_contexts);
3682
3683 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3684 {
3685 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3686 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3687
3688 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3689
3690 if (src_jf->type == IPA_JF_CONST)
3691 {
3692 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3693
3694 if (!src_rdesc)
3695 dst_jf->value.constant.rdesc = NULL;
3696 else if (src->caller == dst->caller)
3697 {
3698 struct ipa_ref *ref;
3699 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3700 gcc_checking_assert (n);
3701 ref = src->caller->find_reference (n, src->call_stmt,
3702 src->lto_stmt_uid);
3703 gcc_checking_assert (ref);
3704 dst->caller->clone_reference (ref, ref->stmt);
3705
3706 gcc_checking_assert (ipa_refdesc_pool);
3707 struct ipa_cst_ref_desc *dst_rdesc
3708 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3709 dst_rdesc->cs = dst;
3710 dst_rdesc->refcount = src_rdesc->refcount;
3711 dst_rdesc->next_duplicate = NULL;
3712 dst_jf->value.constant.rdesc = dst_rdesc;
3713 }
3714 else if (src_rdesc->cs == src)
3715 {
3716 struct ipa_cst_ref_desc *dst_rdesc;
3717 gcc_checking_assert (ipa_refdesc_pool);
3718 dst_rdesc
3719 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3720 dst_rdesc->cs = dst;
3721 dst_rdesc->refcount = src_rdesc->refcount;
3722 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3723 src_rdesc->next_duplicate = dst_rdesc;
3724 dst_jf->value.constant.rdesc = dst_rdesc;
3725 }
3726 else
3727 {
3728 struct ipa_cst_ref_desc *dst_rdesc;
3729 /* This can happen during inlining, when a JFUNC can refer to a
3730 reference taken in a function up in the tree of inline clones.
3731 We need to find the duplicate that refers to our tree of
3732 inline clones. */
3733
3734 gcc_assert (dst->caller->global.inlined_to);
3735 for (dst_rdesc = src_rdesc->next_duplicate;
3736 dst_rdesc;
3737 dst_rdesc = dst_rdesc->next_duplicate)
3738 {
3739 struct cgraph_node *top;
3740 top = dst_rdesc->cs->caller->global.inlined_to
3741 ? dst_rdesc->cs->caller->global.inlined_to
3742 : dst_rdesc->cs->caller;
3743 if (dst->caller->global.inlined_to == top)
3744 break;
3745 }
3746 gcc_assert (dst_rdesc);
3747 dst_jf->value.constant.rdesc = dst_rdesc;
3748 }
3749 }
3750 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3751 && src->caller == dst->caller)
3752 {
3753 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3754 ? dst->caller->global.inlined_to : dst->caller;
3755 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3756 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3757
3758 int c = ipa_get_controlled_uses (root_info, idx);
3759 if (c != IPA_UNDESCRIBED_USE)
3760 {
3761 c++;
3762 ipa_set_controlled_uses (root_info, idx, c);
3763 }
3764 }
3765 }
3766 }
3767
3768 /* Hook that is called by cgraph.c when a node is duplicated. */
3769
3770 static void
3771 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3772 ATTRIBUTE_UNUSED void *data)
3773 {
3774 struct ipa_node_params *old_info, *new_info;
3775 struct ipa_agg_replacement_value *old_av, *new_av;
3776
3777 ipa_check_create_node_params ();
3778 old_info = IPA_NODE_REF (src);
3779 new_info = IPA_NODE_REF (dst);
3780
3781 new_info->descriptors = old_info->descriptors.copy ();
3782 new_info->lattices = NULL;
3783 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3784
3785 new_info->analysis_done = old_info->analysis_done;
3786 new_info->node_enqueued = old_info->node_enqueued;
3787
3788 old_av = ipa_get_agg_replacements_for_node (src);
3789 if (!old_av)
3790 return;
3791
3792 new_av = NULL;
3793 while (old_av)
3794 {
3795 struct ipa_agg_replacement_value *v;
3796
3797 v = ggc_alloc<ipa_agg_replacement_value> ();
3798 memcpy (v, old_av, sizeof (*v));
3799 v->next = new_av;
3800 new_av = v;
3801 old_av = old_av->next;
3802 }
3803 ipa_set_node_agg_value_chain (dst, new_av);
3804 }
3805
3806
3807 /* Analyze a function that has been newly added to the callgraph. */
3808
3809 static void
3810 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3811 {
3812 if (node->has_gimple_body_p ())
3813 ipa_analyze_node (node);
3814 }
3815
3816 /* Register our cgraph hooks if they are not already there. */
3817
3818 void
3819 ipa_register_cgraph_hooks (void)
3820 {
3821 if (!edge_removal_hook_holder)
3822 edge_removal_hook_holder =
3823 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3824 if (!node_removal_hook_holder)
3825 node_removal_hook_holder =
3826 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3827 if (!edge_duplication_hook_holder)
3828 edge_duplication_hook_holder =
3829 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3830 if (!node_duplication_hook_holder)
3831 node_duplication_hook_holder =
3832 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3833 function_insertion_hook_holder =
3834 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3835 }
3836
3837 /* Unregister our cgraph hooks. */
3838
3839 static void
3840 ipa_unregister_cgraph_hooks (void)
3841 {
3842 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3843 edge_removal_hook_holder = NULL;
3844 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3845 node_removal_hook_holder = NULL;
3846 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3847 edge_duplication_hook_holder = NULL;
3848 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3849 node_duplication_hook_holder = NULL;
3850 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3851 function_insertion_hook_holder = NULL;
3852 }
3853
3854 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3855 longer needed after ipa-cp. */
3856
3857 void
3858 ipa_free_all_structures_after_ipa_cp (void)
3859 {
3860 if (!optimize)
3861 {
3862 ipa_free_all_edge_args ();
3863 ipa_free_all_node_params ();
3864 free_alloc_pool (ipcp_sources_pool);
3865 free_alloc_pool (ipcp_cst_values_pool);
3866 free_alloc_pool (ipcp_poly_ctx_values_pool);
3867 free_alloc_pool (ipcp_agg_lattice_pool);
3868 ipa_unregister_cgraph_hooks ();
3869 if (ipa_refdesc_pool)
3870 free_alloc_pool (ipa_refdesc_pool);
3871 }
3872 }
3873
3874 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3875 longer needed after indirect inlining. */
3876
3877 void
3878 ipa_free_all_structures_after_iinln (void)
3879 {
3880 ipa_free_all_edge_args ();
3881 ipa_free_all_node_params ();
3882 ipa_unregister_cgraph_hooks ();
3883 if (ipcp_sources_pool)
3884 free_alloc_pool (ipcp_sources_pool);
3885 if (ipcp_cst_values_pool)
3886 free_alloc_pool (ipcp_cst_values_pool);
3887 if (ipcp_poly_ctx_values_pool)
3888 free_alloc_pool (ipcp_poly_ctx_values_pool);
3889 if (ipcp_agg_lattice_pool)
3890 free_alloc_pool (ipcp_agg_lattice_pool);
3891 if (ipa_refdesc_pool)
3892 free_alloc_pool (ipa_refdesc_pool);
3893 }
3894
3895 /* Print ipa_tree_map data structures (parameter descriptors) of the
3896 function NODE to F. */
3897
3898 void
3899 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3900 {
3901 int i, count;
3902 struct ipa_node_params *info;
3903
3904 if (!node->definition)
3905 return;
3906 info = IPA_NODE_REF (node);
3907 fprintf (f, " function %s/%i parameter descriptors:\n",
3908 node->name (), node->order);
3909 count = ipa_get_param_count (info);
3910 for (i = 0; i < count; i++)
3911 {
3912 int c;
3913
3914 fprintf (f, " ");
3915 ipa_dump_param (f, info, i);
3916 if (ipa_is_param_used (info, i))
3917 fprintf (f, " used");
3918 c = ipa_get_controlled_uses (info, i);
3919 if (c == IPA_UNDESCRIBED_USE)
3920 fprintf (f, " undescribed_use");
3921 else
3922 fprintf (f, " controlled_uses=%i", c);
3923 fprintf (f, "\n");
3924 }
3925 }
3926
3927 /* Print ipa_tree_map data structures of all functions in the
3928 callgraph to F. */
3929
3930 void
3931 ipa_print_all_params (FILE * f)
3932 {
3933 struct cgraph_node *node;
3934
3935 fprintf (f, "\nFunction parameters:\n");
3936 FOR_EACH_FUNCTION (node)
3937 ipa_print_node_params (f, node);
3938 }
3939
3940 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3941
3942 vec<tree>
3943 ipa_get_vector_of_formal_parms (tree fndecl)
3944 {
3945 vec<tree> args;
3946 int count;
3947 tree parm;
3948
3949 gcc_assert (!flag_wpa);
3950 count = count_formal_params (fndecl);
3951 args.create (count);
3952 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3953 args.quick_push (parm);
3954
3955 return args;
3956 }
3957
3958 /* Return a heap allocated vector containing types of formal parameters of
3959 function type FNTYPE. */
3960
3961 vec<tree>
3962 ipa_get_vector_of_formal_parm_types (tree fntype)
3963 {
3964 vec<tree> types;
3965 int count = 0;
3966 tree t;
3967
3968 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3969 count++;
3970
3971 types.create (count);
3972 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3973 types.quick_push (TREE_VALUE (t));
3974
3975 return types;
3976 }
3977
3978 /* Modify the function declaration FNDECL and its type according to the plan in
3979 ADJUSTMENTS. It also sets the base fields of individual adjustment
3980 structures to reflect the actual parameters being modified, as determined by the
3981 base_index field. */
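/* As an illustration (a hypothetical adjustment vector, not taken from any
caller in this file): for void f (int a, char b), the vector
{ {op=IPA_PARM_OP_COPY, base_index=0}, {op=IPA_PARM_OP_REMOVE, base_index=1} }
rewrites the declaration to void f (int a), dropping b from both
DECL_ARGUMENTS and TYPE_ARG_TYPES. */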
3982
3983 void
3984 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3985 {
3986 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3987 tree orig_type = TREE_TYPE (fndecl);
3988 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3989
3990 /* The following test is an ugly hack; some functions simply don't have any
3991 arguments in their type. This is probably a bug but well... */
3992 bool care_for_types = (old_arg_types != NULL_TREE);
3993 bool last_parm_void;
3994 vec<tree> otypes;
3995 if (care_for_types)
3996 {
3997 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3998 == void_type_node);
3999 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4000 if (last_parm_void)
4001 gcc_assert (oparms.length () + 1 == otypes.length ());
4002 else
4003 gcc_assert (oparms.length () == otypes.length ());
4004 }
4005 else
4006 {
4007 last_parm_void = false;
4008 otypes.create (0);
4009 }
4010
4011 int len = adjustments.length ();
4012 tree *link = &DECL_ARGUMENTS (fndecl);
4013 tree new_arg_types = NULL;
4014 for (int i = 0; i < len; i++)
4015 {
4016 struct ipa_parm_adjustment *adj;
4017 gcc_assert (link);
4018
4019 adj = &adjustments[i];
4020 tree parm;
4021 if (adj->op == IPA_PARM_OP_NEW)
4022 parm = NULL;
4023 else
4024 parm = oparms[adj->base_index];
4025 adj->base = parm;
4026
4027 if (adj->op == IPA_PARM_OP_COPY)
4028 {
4029 if (care_for_types)
4030 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4031 new_arg_types);
4032 *link = parm;
4033 link = &DECL_CHAIN (parm);
4034 }
4035 else if (adj->op != IPA_PARM_OP_REMOVE)
4036 {
4037 tree new_parm;
4038 tree ptype;
4039
4040 if (adj->by_ref)
4041 ptype = build_pointer_type (adj->type);
4042 else
4043 {
4044 ptype = adj->type;
4045 if (is_gimple_reg_type (ptype))
4046 {
4047 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4048 if (TYPE_ALIGN (ptype) < malign)
4049 ptype = build_aligned_type (ptype, malign);
4050 }
4051 }
4052
4053 if (care_for_types)
4054 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4055
4056 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4057 ptype);
4058 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4059 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4060 DECL_ARTIFICIAL (new_parm) = 1;
4061 DECL_ARG_TYPE (new_parm) = ptype;
4062 DECL_CONTEXT (new_parm) = fndecl;
4063 TREE_USED (new_parm) = 1;
4064 DECL_IGNORED_P (new_parm) = 1;
4065 layout_decl (new_parm, 0);
4066
4067 if (adj->op == IPA_PARM_OP_NEW)
4068 adj->base = NULL;
4069 else
4070 adj->base = parm;
4071 adj->new_decl = new_parm;
4072
4073 *link = new_parm;
4074 link = &DECL_CHAIN (new_parm);
4075 }
4076 }
4077
4078 *link = NULL_TREE;
4079
4080 tree new_reversed = NULL;
4081 if (care_for_types)
4082 {
4083 new_reversed = nreverse (new_arg_types);
4084 if (last_parm_void)
4085 {
4086 if (new_reversed)
4087 TREE_CHAIN (new_arg_types) = void_list_node;
4088 else
4089 new_reversed = void_list_node;
4090 }
4091 }
4092
4093 /* Use copy_node to preserve as much as possible from the original type
4094 (debug info, attribute lists etc.).
4095 The exception is that a METHOD_TYPE must have a THIS argument.
4096 When we are asked to remove it, we need to build a new FUNCTION_TYPE
4097 instead. */
4098 tree new_type = NULL;
4099 if (TREE_CODE (orig_type) != METHOD_TYPE
4100 || (adjustments[0].op == IPA_PARM_OP_COPY
4101 && adjustments[0].base_index == 0))
4102 {
4103 new_type = build_distinct_type_copy (orig_type);
4104 TYPE_ARG_TYPES (new_type) = new_reversed;
4105 }
4106 else
4107 {
4108 new_type
4109 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4110 new_reversed));
4111 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4112 DECL_VINDEX (fndecl) = NULL_TREE;
4113 }
4114
4115 /* When signature changes, we need to clear builtin info. */
4116 if (DECL_BUILT_IN (fndecl))
4117 {
4118 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4119 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4120 }
4121
4122 TREE_TYPE (fndecl) = new_type;
4123 DECL_VIRTUAL_P (fndecl) = 0;
4124 DECL_LANG_SPECIFIC (fndecl) = NULL;
4125 otypes.release ();
4126 oparms.release ();
4127 }
4128
4129 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4130 If this is a directly recursive call, CS must be NULL. Otherwise it must
4131 contain the corresponding call graph edge. */
4132
4133 void
4134 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4135 ipa_parm_adjustment_vec adjustments)
4136 {
4137 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4138 vec<tree> vargs;
4139 vec<tree, va_gc> **debug_args = NULL;
4140 gimple new_stmt;
4141 gimple_stmt_iterator gsi, prev_gsi;
4142 tree callee_decl;
4143 int i, len;
4144
4145 len = adjustments.length ();
4146 vargs.create (len);
4147 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4148 current_node->remove_stmt_references (stmt);
4149
4150 gsi = gsi_for_stmt (stmt);
4151 prev_gsi = gsi;
4152 gsi_prev (&prev_gsi);
4153 for (i = 0; i < len; i++)
4154 {
4155 struct ipa_parm_adjustment *adj;
4156
4157 adj = &adjustments[i];
4158
4159 if (adj->op == IPA_PARM_OP_COPY)
4160 {
4161 tree arg = gimple_call_arg (stmt, adj->base_index);
4162
4163 vargs.quick_push (arg);
4164 }
4165 else if (adj->op != IPA_PARM_OP_REMOVE)
4166 {
4167 tree expr, base, off;
4168 location_t loc;
4169 unsigned int deref_align = 0;
4170 bool deref_base = false;
4171
4172 /* When we create a new parameter out of the value of the old one, we can
4173 do the following kinds of transformations:
4174
4175 - A scalar passed by reference is converted to a scalar passed by
4176 value. (adj->by_ref is false and the type of the original
4177 actual argument is a pointer to a scalar).
4178
4179 - A part of an aggregate is passed instead of the whole aggregate.
4180 The part can be passed either by value or by reference, this is
4181 determined by value of adj->by_ref. Moreover, the code below
4182 handles both situations when the original aggregate is passed by
4183 value (its type is not a pointer) and when it is passed by
4184 reference (it is a pointer to an aggregate).
4185
4186 When the new argument is passed by reference (adj->by_ref is true)
4187 it must be a part of an aggregate and therefore we form it by
4188 simply taking the address of a reference inside the original
4189 aggregate. */
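/* For illustration (a hypothetical call, not from this unit): for
foo (&s), an adjustment selecting a scalar member at byte offset 4
with by_ref false yields the equivalent of MEM_REF <&s, 4>, i.e. the
member value itself; with by_ref true we would instead pass the
address of that MEM_REF. */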
4190
4191 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4192 base = gimple_call_arg (stmt, adj->base_index);
4193 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4194 : EXPR_LOCATION (base);
4195
4196 if (TREE_CODE (base) != ADDR_EXPR
4197 && POINTER_TYPE_P (TREE_TYPE (base)))
4198 off = build_int_cst (adj->alias_ptr_type,
4199 adj->offset / BITS_PER_UNIT);
4200 else
4201 {
4202 HOST_WIDE_INT base_offset;
4203 tree prev_base;
4204 bool addrof;
4205
4206 if (TREE_CODE (base) == ADDR_EXPR)
4207 {
4208 base = TREE_OPERAND (base, 0);
4209 addrof = true;
4210 }
4211 else
4212 addrof = false;
4213 prev_base = base;
4214 base = get_addr_base_and_unit_offset (base, &base_offset);
4215 /* Aggregate arguments can have non-invariant addresses. */
4216 if (!base)
4217 {
4218 base = build_fold_addr_expr (prev_base);
4219 off = build_int_cst (adj->alias_ptr_type,
4220 adj->offset / BITS_PER_UNIT);
4221 }
4222 else if (TREE_CODE (base) == MEM_REF)
4223 {
4224 if (!addrof)
4225 {
4226 deref_base = true;
4227 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4228 }
4229 off = build_int_cst (adj->alias_ptr_type,
4230 base_offset
4231 + adj->offset / BITS_PER_UNIT);
4232 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4233 off);
4234 base = TREE_OPERAND (base, 0);
4235 }
4236 else
4237 {
4238 off = build_int_cst (adj->alias_ptr_type,
4239 base_offset
4240 + adj->offset / BITS_PER_UNIT);
4241 base = build_fold_addr_expr (base);
4242 }
4243 }
4244
4245 if (!adj->by_ref)
4246 {
4247 tree type = adj->type;
4248 unsigned int align;
4249 unsigned HOST_WIDE_INT misalign;
4250
4251 if (deref_base)
4252 {
4253 align = deref_align;
4254 misalign = 0;
4255 }
4256 else
4257 {
4258 get_pointer_alignment_1 (base, &align, &misalign);
4259 if (TYPE_ALIGN (type) > align)
4260 align = TYPE_ALIGN (type);
4261 }
4262 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4263 * BITS_PER_UNIT);
4264 misalign = misalign & (align - 1);
4265 if (misalign != 0)
4266 align = (misalign & -misalign);
4267 if (align < TYPE_ALIGN (type))
4268 type = build_aligned_type (type, align);
4269 base = force_gimple_operand_gsi (&gsi, base,
4270 true, NULL, true, GSI_SAME_STMT);
4271 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4272 /* If expr is not a valid gimple call argument, emit
4273 a load into a temporary. */
4274 if (is_gimple_reg_type (TREE_TYPE (expr)))
4275 {
4276 gimple tem = gimple_build_assign (NULL_TREE, expr);
4277 if (gimple_in_ssa_p (cfun))
4278 {
4279 gimple_set_vuse (tem, gimple_vuse (stmt));
4280 expr = make_ssa_name (TREE_TYPE (expr), tem);
4281 }
4282 else
4283 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4284 gimple_assign_set_lhs (tem, expr);
4285 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4286 }
4287 }
4288 else
4289 {
4290 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4291 expr = build_fold_addr_expr (expr);
4292 expr = force_gimple_operand_gsi (&gsi, expr,
4293 true, NULL, true, GSI_SAME_STMT);
4294 }
4295 vargs.quick_push (expr);
4296 }
4297 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4298 {
4299 unsigned int ix;
4300 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4301 gimple def_temp;
4302
4303 arg = gimple_call_arg (stmt, adj->base_index);
4304 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4305 {
4306 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4307 continue;
4308 arg = fold_convert_loc (gimple_location (stmt),
4309 TREE_TYPE (origin), arg);
4310 }
4311 if (debug_args == NULL)
4312 debug_args = decl_debug_args_insert (callee_decl);
4313 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4314 if (ddecl == origin)
4315 {
4316 ddecl = (**debug_args)[ix + 1];
4317 break;
4318 }
4319 if (ddecl == NULL)
4320 {
4321 ddecl = make_node (DEBUG_EXPR_DECL);
4322 DECL_ARTIFICIAL (ddecl) = 1;
4323 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4324 DECL_MODE (ddecl) = DECL_MODE (origin);
4325
4326 vec_safe_push (*debug_args, origin);
4327 vec_safe_push (*debug_args, ddecl);
4328 }
4329 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4330 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4331 }
4332 }
4333
4334 if (dump_file && (dump_flags & TDF_DETAILS))
4335 {
4336 fprintf (dump_file, "replacing stmt:");
4337 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4338 }
4339
4340 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4341 vargs.release ();
4342 if (gimple_call_lhs (stmt))
4343 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4344
4345 gimple_set_block (new_stmt, gimple_block (stmt));
4346 if (gimple_has_location (stmt))
4347 gimple_set_location (new_stmt, gimple_location (stmt));
4348 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4349 gimple_call_copy_flags (new_stmt, stmt);
4350 if (gimple_in_ssa_p (cfun))
4351 {
4352 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4353 if (gimple_vdef (stmt))
4354 {
4355 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4356 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4357 }
4358 }
4359
4360 if (dump_file && (dump_flags & TDF_DETAILS))
4361 {
4362 fprintf (dump_file, "with stmt:");
4363 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4364 fprintf (dump_file, "\n");
4365 }
4366 gsi_replace (&gsi, new_stmt, true);
4367 if (cs)
4368 cs->set_call_stmt (new_stmt);
4369 do
4370 {
4371 current_node->record_stmt_references (gsi_stmt (gsi));
4372 gsi_prev (&gsi);
4373 }
4374 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4375 }
4376
4377 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4378 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4379 specifies whether the function should care about type incompatibility
4380 between the current and new expressions. If it is false, the function will leave
4381 incompatibility issues to the caller. Return true iff the expression
4382 was modified. */
4383
4384 bool
4385 ipa_modify_expr (tree *expr, bool convert,
4386 ipa_parm_adjustment_vec adjustments)
4387 {
4388 struct ipa_parm_adjustment *cand
4389 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4390 if (!cand)
4391 return false;
4392
4393 tree src;
4394 if (cand->by_ref)
4395 src = build_simple_mem_ref (cand->new_decl);
4396 else
4397 src = cand->new_decl;
4398
4399 if (dump_file && (dump_flags & TDF_DETAILS))
4400 {
4401 fprintf (dump_file, "About to replace expr ");
4402 print_generic_expr (dump_file, *expr, 0);
4403 fprintf (dump_file, " with ");
4404 print_generic_expr (dump_file, src, 0);
4405 fprintf (dump_file, "\n");
4406 }
4407
4408 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4409 {
4410 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4411 *expr = vce;
4412 }
4413 else
4414 *expr = src;
4415 return true;
4416 }
4417
4418 /* If T is an SSA_NAME, return NULL if it is not a default def or
4419 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4420 the base variable is always returned, regardless of whether it is a default
4421 def. Return T if it is not an SSA_NAME. */
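/* E.g. for the default definition a_1(D) of PARM_DECL a this returns a;
for a non-default definition a_2 it returns NULL_TREE unless
IGNORE_DEFAULT_DEF is set. */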
4422
4423 static tree
4424 get_ssa_base_param (tree t, bool ignore_default_def)
4425 {
4426 if (TREE_CODE (t) == SSA_NAME)
4427 {
4428 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4429 return SSA_NAME_VAR (t);
4430 else
4431 return NULL_TREE;
4432 }
4433 return t;
4434 }
4435
4436 /* Given an expression, return an adjustment entry specifying the
4437 transformation to be done on EXPR. If no suitable adjustment entry
4438 was found, returns NULL.
4439
4440 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4441 default def; otherwise bail on them.
4442
4443 If CONVERT is non-NULL, this function will set *CONVERT if the
4444 expression provided is a component reference. ADJUSTMENTS is the
4445 adjustments vector. */
4446
4447 ipa_parm_adjustment *
4448 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4449 ipa_parm_adjustment_vec adjustments,
4450 bool ignore_default_def)
4451 {
4452 if (TREE_CODE (**expr) == BIT_FIELD_REF
4453 || TREE_CODE (**expr) == IMAGPART_EXPR
4454 || TREE_CODE (**expr) == REALPART_EXPR)
4455 {
4456 *expr = &TREE_OPERAND (**expr, 0);
4457 if (convert)
4458 *convert = true;
4459 }
4460
4461 HOST_WIDE_INT offset, size, max_size;
4462 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4463 if (!base || size == -1 || max_size == -1)
4464 return NULL;
4465
4466 if (TREE_CODE (base) == MEM_REF)
4467 {
4468 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4469 base = TREE_OPERAND (base, 0);
4470 }
4471
4472 base = get_ssa_base_param (base, ignore_default_def);
4473 if (!base || TREE_CODE (base) != PARM_DECL)
4474 return NULL;
4475
4476 struct ipa_parm_adjustment *cand = NULL;
4477 unsigned int len = adjustments.length ();
4478 for (unsigned i = 0; i < len; i++)
4479 {
4480 struct ipa_parm_adjustment *adj = &adjustments[i];
4481
4482 if (adj->base == base
4483 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4484 {
4485 cand = adj;
4486 break;
4487 }
4488 }
4489
4490 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4491 return NULL;
4492 return cand;
4493 }
4494
4495 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4496
4497 static bool
4498 index_in_adjustments_multiple_times_p (int base_index,
4499 ipa_parm_adjustment_vec adjustments)
4500 {
4501 int i, len = adjustments.length ();
4502 bool one = false;
4503
4504 for (i = 0; i < len; i++)
4505 {
4506 struct ipa_parm_adjustment *adj;
4507 adj = &adjustments[i];
4508
4509 if (adj->base_index == base_index)
4510 {
4511 if (one)
4512 return true;
4513 else
4514 one = true;
4515 }
4516 }
4517 return false;
4518 }
4519
4520
4521 /* Return adjustments that should have the same effect on function parameters
4522 and call arguments as if they were first changed according to adjustments in
4523 INNER and then by adjustments in OUTER. */
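/* A hypothetical example: if INNER turns f (a, b, c) into f (a, c) by
removing b, and OUTER then removes the first remaining parameter to get
f (c), the combined vector must drop both a and b from the original
signature in a single step while keeping c. */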
4524
4525 ipa_parm_adjustment_vec
4526 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4527 ipa_parm_adjustment_vec outer)
4528 {
4529 int i, outlen = outer.length ();
4530 int inlen = inner.length ();
4531 int removals = 0;
4532 ipa_parm_adjustment_vec adjustments, tmp;
4533
4534 tmp.create (inlen);
4535 for (i = 0; i < inlen; i++)
4536 {
4537 struct ipa_parm_adjustment *n;
4538 n = &inner[i];
4539
4540 if (n->op == IPA_PARM_OP_REMOVE)
4541 removals++;
4542 else
4543 {
4544 /* FIXME: Handling of new arguments is not implemented yet. */
4545 gcc_assert (n->op != IPA_PARM_OP_NEW);
4546 tmp.quick_push (*n);
4547 }
4548 }
4549
4550 adjustments.create (outlen + removals);
4551 for (i = 0; i < outlen; i++)
4552 {
4553 struct ipa_parm_adjustment r;
4554 struct ipa_parm_adjustment *out = &outer[i];
4555 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4556
4557 memset (&r, 0, sizeof (r));
4558 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4559 if (out->op == IPA_PARM_OP_REMOVE)
4560 {
4561 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4562 {
4563 r.op = IPA_PARM_OP_REMOVE;
4564 adjustments.quick_push (r);
4565 }
4566 continue;
4567 }
4568 else
4569 {
4570 /* FIXME: Handling of new arguments is not implemented yet. */
4571 gcc_assert (out->op != IPA_PARM_OP_NEW);
4572 }
4573
4574 r.base_index = in->base_index;
4575 r.type = out->type;
4576
4577 /* FIXME: Create nonlocal value too. */
4578
4579 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4580 r.op = IPA_PARM_OP_COPY;
4581 else if (in->op == IPA_PARM_OP_COPY)
4582 r.offset = out->offset;
4583 else if (out->op == IPA_PARM_OP_COPY)
4584 r.offset = in->offset;
4585 else
4586 r.offset = in->offset + out->offset;
4587 adjustments.quick_push (r);
4588 }
4589
4590 for (i = 0; i < inlen; i++)
4591 {
4592 struct ipa_parm_adjustment *n = &inner[i];
4593
4594 if (n->op == IPA_PARM_OP_REMOVE)
4595 adjustments.quick_push (*n);
4596 }
4597
4598 tmp.release ();
4599 return adjustments;
4600 }
4601
4602 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4603 human-friendly way, assuming they are meant to be applied to FNDECL. */
4604
4605 void
4606 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4607 tree fndecl)
4608 {
4609 int i, len = adjustments.length ();
4610 bool first = true;
4611 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4612
4613 fprintf (file, "IPA param adjustments: ");
4614 for (i = 0; i < len; i++)
4615 {
4616 struct ipa_parm_adjustment *adj;
4617 adj = &adjustments[i];
4618
4619 if (!first)
4620 fprintf (file, " ");
4621 else
4622 first = false;
4623
4624 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4625 print_generic_expr (file, parms[adj->base_index], 0);
4626 if (adj->base)
4627 {
4628 fprintf (file, ", base: ");
4629 print_generic_expr (file, adj->base, 0);
4630 }
4631 if (adj->new_decl)
4632 {
4633 fprintf (file, ", new_decl: ");
4634 print_generic_expr (file, adj->new_decl, 0);
4635 }
4636 if (adj->new_ssa_base)
4637 {
4638 fprintf (file, ", new_ssa_base: ");
4639 print_generic_expr (file, adj->new_ssa_base, 0);
4640 }
4641
4642 if (adj->op == IPA_PARM_OP_COPY)
4643 fprintf (file, ", copy_param");
4644 else if (adj->op == IPA_PARM_OP_REMOVE)
4645 fprintf (file, ", remove_param");
4646 else
4647 fprintf (file, ", offset %li", (long) adj->offset);
4648 if (adj->by_ref)
4649 fprintf (file, ", by_ref");
4650 print_node_brief (file, ", type: ", adj->type, 0);
4651 fprintf (file, "\n");
4652 }
4653 parms.release ();
4654 }
4655
4656 /* Dump the linked list of aggregate replacement values AV to F. */
4657
4658 void
4659 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4660 {
4661 bool comma = false;
4662 fprintf (f, " Aggregate replacements:");
4663 for (; av; av = av->next)
4664 {
4665 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4666 av->index, av->offset);
4667 print_generic_expr (f, av->value, 0);
4668 comma = true;
4669 }
4670 fprintf (f, "\n");
4671 }
4672
4673 /* Stream out jump function JUMP_FUNC to OB. */
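/* The record written below is: the jump function type as a uhwi, a
type-specific payload, the number of aggregate items, a one-bit by_ref
flag when that number is non-zero, and finally each item as an
offset/value pair. ipa_read_jump_function must mirror this layout. */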
4674
4675 static void
4676 ipa_write_jump_function (struct output_block *ob,
4677 struct ipa_jump_func *jump_func)
4678 {
4679 struct ipa_agg_jf_item *item;
4680 struct bitpack_d bp;
4681 int i, count;
4682
4683 streamer_write_uhwi (ob, jump_func->type);
4684 switch (jump_func->type)
4685 {
4686 case IPA_JF_UNKNOWN:
4687 break;
4688 case IPA_JF_KNOWN_TYPE:
4689 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4690 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4691 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4692 break;
4693 case IPA_JF_CONST:
4694 gcc_assert (
4695 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4696 stream_write_tree (ob, jump_func->value.constant.value, true);
4697 break;
4698 case IPA_JF_PASS_THROUGH:
4699 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4700 if (jump_func->value.pass_through.operation == NOP_EXPR)
4701 {
4702 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4703 bp = bitpack_create (ob->main_stream);
4704 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4705 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4706 streamer_write_bitpack (&bp);
4707 }
4708 else
4709 {
4710 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4711 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4712 }
4713 break;
4714 case IPA_JF_ANCESTOR:
4715 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4716 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4717 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4718 bp = bitpack_create (ob->main_stream);
4719 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4720 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4721 streamer_write_bitpack (&bp);
4722 break;
4723 }
4724
4725 count = vec_safe_length (jump_func->agg.items);
4726 streamer_write_uhwi (ob, count);
4727 if (count)
4728 {
4729 bp = bitpack_create (ob->main_stream);
4730 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4731 streamer_write_bitpack (&bp);
4732 }
4733
4734 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4735 {
4736 streamer_write_uhwi (ob, item->offset);
4737 stream_write_tree (ob, item->value, true);
4738 }
4739 }
4740
4741 /* Read in jump function JUMP_FUNC from IB. */
4742
4743 static void
4744 ipa_read_jump_function (struct lto_input_block *ib,
4745 struct ipa_jump_func *jump_func,
4746 struct cgraph_edge *cs,
4747 struct data_in *data_in)
4748 {
4749 enum jump_func_type jftype;
4750 enum tree_code operation;
4751 int i, count;
4752
4753 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4754 switch (jftype)
4755 {
4756 case IPA_JF_UNKNOWN:
4757 jump_func->type = IPA_JF_UNKNOWN;
4758 break;
4759 case IPA_JF_KNOWN_TYPE:
4760 {
4761 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4762 tree base_type = stream_read_tree (ib, data_in);
4763 tree component_type = stream_read_tree (ib, data_in);
4764
4765 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4766 break;
4767 }
4768 case IPA_JF_CONST:
4769 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4770 break;
4771 case IPA_JF_PASS_THROUGH:
4772 operation = (enum tree_code) streamer_read_uhwi (ib);
4773 if (operation == NOP_EXPR)
4774 {
4775 int formal_id = streamer_read_uhwi (ib);
4776 struct bitpack_d bp = streamer_read_bitpack (ib);
4777 bool agg_preserved = bp_unpack_value (&bp, 1);
4778 bool type_preserved = bp_unpack_value (&bp, 1);
4779 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4780 type_preserved);
4781 }
4782 else
4783 {
4784 tree operand = stream_read_tree (ib, data_in);
4785 int formal_id = streamer_read_uhwi (ib);
4786 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4787 operation);
4788 }
4789 break;
4790 case IPA_JF_ANCESTOR:
4791 {
4792 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4793 tree type = stream_read_tree (ib, data_in);
4794 int formal_id = streamer_read_uhwi (ib);
4795 struct bitpack_d bp = streamer_read_bitpack (ib);
4796 bool agg_preserved = bp_unpack_value (&bp, 1);
4797 bool type_preserved = bp_unpack_value (&bp, 1);
4798
4799 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4800 type_preserved);
4801 break;
4802 }
4803 }
4804
4805 count = streamer_read_uhwi (ib);
4806 vec_alloc (jump_func->agg.items, count);
4807 if (count)
4808 {
4809 struct bitpack_d bp = streamer_read_bitpack (ib);
4810 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4811 }
4812 for (i = 0; i < count; i++)
4813 {
4814 struct ipa_agg_jf_item item;
4815 item.offset = streamer_read_uhwi (ib);
4816 item.value = stream_read_tree (ib, data_in);
4817 jump_func->agg.items->quick_push (item);
4818 }
4819 }
4820
4821 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4822 relevant to indirect inlining to OB. */
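/* The streamed record is: param_index as a hwi, a bitpack of five flags,
the offset when agg_contents or polymorphic is set, and for polymorphic
calls the OTR token, type and context. */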
4823
4824 static void
4825 ipa_write_indirect_edge_info (struct output_block *ob,
4826 struct cgraph_edge *cs)
4827 {
4828 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4829 struct bitpack_d bp;
4830
4831 streamer_write_hwi (ob, ii->param_index);
4832 bp = bitpack_create (ob->main_stream);
4833 bp_pack_value (&bp, ii->polymorphic, 1);
4834 bp_pack_value (&bp, ii->agg_contents, 1);
4835 bp_pack_value (&bp, ii->member_ptr, 1);
4836 bp_pack_value (&bp, ii->by_ref, 1);
4837 bp_pack_value (&bp, ii->vptr_changed, 1);
4838 streamer_write_bitpack (&bp);
4839 if (ii->agg_contents || ii->polymorphic)
4840 streamer_write_hwi (ob, ii->offset);
4841 else
4842 gcc_assert (ii->offset == 0);
4843
4844 if (ii->polymorphic)
4845 {
4846 streamer_write_hwi (ob, ii->otr_token);
4847 stream_write_tree (ob, ii->otr_type, true);
4848 ii->context.stream_out (ob);
4849 }
4850 }
4851
4852 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4853 relevant to indirect inlining from IB. */
4854
4855 static void
4856 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4857 struct data_in *data_in,
4858 struct cgraph_edge *cs)
4859 {
4860 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4861 struct bitpack_d bp;
4862
4863 ii->param_index = (int) streamer_read_hwi (ib);
4864 bp = streamer_read_bitpack (ib);
4865 ii->polymorphic = bp_unpack_value (&bp, 1);
4866 ii->agg_contents = bp_unpack_value (&bp, 1);
4867 ii->member_ptr = bp_unpack_value (&bp, 1);
4868 ii->by_ref = bp_unpack_value (&bp, 1);
4869 ii->vptr_changed = bp_unpack_value (&bp, 1);
4870 if (ii->agg_contents || ii->polymorphic)
4871 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4872 else
4873 ii->offset = 0;
4874 if (ii->polymorphic)
4875 {
4876 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4877 ii->otr_type = stream_read_tree (ib, data_in);
4878 ii->context.stream_in (ib, data_in);
4879 }
4880 }
4881
4882 /* Stream out NODE info to OB. */
4883
4884 static void
4885 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4886 {
4887 int node_ref;
4888 lto_symtab_encoder_t encoder;
4889 struct ipa_node_params *info = IPA_NODE_REF (node);
4890 int j;
4891 struct cgraph_edge *e;
4892 struct bitpack_d bp;
4893
4894 encoder = ob->decl_state->symtab_node_encoder;
4895 node_ref = lto_symtab_encoder_encode (encoder, node);
4896 streamer_write_uhwi (ob, node_ref);
4897
4898 streamer_write_uhwi (ob, ipa_get_param_count (info));
4899 for (j = 0; j < ipa_get_param_count (info); j++)
4900 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4901 bp = bitpack_create (ob->main_stream);
4902 gcc_assert (info->analysis_done
4903 || ipa_get_param_count (info) == 0);
4904 gcc_assert (!info->node_enqueued);
4905 gcc_assert (!info->ipcp_orig_node);
4906 for (j = 0; j < ipa_get_param_count (info); j++)
4907 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4908 streamer_write_bitpack (&bp);
4909 for (j = 0; j < ipa_get_param_count (info); j++)
4910 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4911 for (e = node->callees; e; e = e->next_callee)
4912 {
4913 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4914
4915 streamer_write_uhwi (ob,
4916 ipa_get_cs_argument_count (args) * 2
4917 + (args->polymorphic_call_contexts != NULL));
4918 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4919 {
4920 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4921 if (args->polymorphic_call_contexts != NULL)
4922 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4923 }
4924 }
4925 for (e = node->indirect_calls; e; e = e->next_callee)
4926 {
4927 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4928
4929 streamer_write_uhwi (ob,
4930 ipa_get_cs_argument_count (args) * 2
4931 + (args->polymorphic_call_contexts != NULL));
4932 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4933 {
4934 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4935 if (args->polymorphic_call_contexts != NULL)
4936 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4937 }
4938 ipa_write_indirect_edge_info (ob, e);
4939 }
4940 }
4941
4942 /* Stream in NODE info from IB. */
4943
4944 static void
4945 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4946 struct data_in *data_in)
4947 {
4948 struct ipa_node_params *info = IPA_NODE_REF (node);
4949 int k;
4950 struct cgraph_edge *e;
4951 struct bitpack_d bp;
4952
4953 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4954
4955 for (k = 0; k < ipa_get_param_count (info); k++)
4956 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4957
4958 bp = streamer_read_bitpack (ib);
4959 if (ipa_get_param_count (info) != 0)
4960 info->analysis_done = true;
4961 info->node_enqueued = false;
4962 for (k = 0; k < ipa_get_param_count (info); k++)
4963 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4964 for (k = 0; k < ipa_get_param_count (info); k++)
4965 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4966 for (e = node->callees; e; e = e->next_callee)
4967 {
4968 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4969 int count = streamer_read_uhwi (ib);
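/* The count was streamed as twice the argument count, with the low bit
set when polymorphic call contexts follow each jump function (see
ipa_write_node_info). */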
4970 bool contexts_computed = count & 1;
4971 count /= 2;
4972
4973 if (!count)
4974 continue;
4975 vec_safe_grow_cleared (args->jump_functions, count);
4976 if (contexts_computed)
4977 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4978
4979 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4980 {
4981 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4982 data_in);
4983 if (contexts_computed)
4984 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4985 }
4986 }
4987 for (e = node->indirect_calls; e; e = e->next_callee)
4988 {
4989 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4990 int count = streamer_read_uhwi (ib);
4991 bool contexts_computed = count & 1;
4992 count /= 2;
4993
4994 if (count)
4995 {
4996 vec_safe_grow_cleared (args->jump_functions, count);
4997 if (contexts_computed)
4998 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4999 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5000 {
5001 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5002 data_in);
5003 if (contexts_computed)
5004 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5005 }
5006 }
5007 ipa_read_indirect_edge_info (ib, data_in, e);
5008 }
5009 }
5010
5011 /* Write jump functions for nodes in the current LTO partition. */
5012
5013 void
5014 ipa_prop_write_jump_functions (void)
5015 {
5016 struct cgraph_node *node;
5017 struct output_block *ob;
5018 unsigned int count = 0;
5019 lto_symtab_encoder_iterator lsei;
5020 lto_symtab_encoder_t encoder;
5021
5022
5023 if (!ipa_node_params_vector.exists ())
5024 return;
5025
5026 ob = create_output_block (LTO_section_jump_functions);
5027 encoder = ob->decl_state->symtab_node_encoder;
5028 ob->symbol = NULL;
5029 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5030 lsei_next_function_in_partition (&lsei))
5031 {
5032 node = lsei_cgraph_node (lsei);
5033 if (node->has_gimple_body_p ()
5034 && IPA_NODE_REF (node) != NULL)
5035 count++;
5036 }
5037
5038 streamer_write_uhwi (ob, count);
5039
5040 /* Process all of the functions. */
5041 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5042 lsei_next_function_in_partition (&lsei))
5043 {
5044 node = lsei_cgraph_node (lsei);
5045 if (node->has_gimple_body_p ()
5046 && IPA_NODE_REF (node) != NULL)
5047 ipa_write_node_info (ob, node);
5048 }
5049 streamer_write_char_stream (ob->main_stream, 0);
5050 produce_asm (ob, NULL);
5051 destroy_output_block (ob);
5052 }
5053
5054 /* Read a section in file FILE_DATA of length LEN with data DATA. */
5055
5056 static void
5057 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5058 size_t len)
5059 {
5060 const struct lto_function_header *header =
5061 (const struct lto_function_header *) data;
5062 const int cfg_offset = sizeof (struct lto_function_header);
5063 const int main_offset = cfg_offset + header->cfg_size;
5064 const int string_offset = main_offset + header->main_size;
5065 struct data_in *data_in;
5066 unsigned int i;
5067 unsigned int count;
5068
5069 lto_input_block ib_main ((const char *) data + main_offset,
5070 header->main_size);
5071
5072 data_in =
5073 lto_data_in_create (file_data, (const char *) data + string_offset,
5074 header->string_size, vNULL);
5075 count = streamer_read_uhwi (&ib_main);
5076
5077 for (i = 0; i < count; i++)
5078 {
5079 unsigned int index;
5080 struct cgraph_node *node;
5081 lto_symtab_encoder_t encoder;
5082
5083 index = streamer_read_uhwi (&ib_main);
5084 encoder = file_data->symtab_node_encoder;
5085 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5086 index));
5087 gcc_assert (node->definition);
5088 ipa_read_node_info (&ib_main, node, data_in);
5089 }
5090 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5091 len);
5092 lto_data_in_delete (data_in);
5093 }
5094
5095 /* Read IPA-CP jump functions. */
5096
5097 void
5098 ipa_prop_read_jump_functions (void)
5099 {
5100 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5101 struct lto_file_decl_data *file_data;
5102 unsigned int j = 0;
5103
5104 ipa_check_create_node_params ();
5105 ipa_check_create_edge_args ();
5106 ipa_register_cgraph_hooks ();
5107
5108 while ((file_data = file_data_vec[j++]))
5109 {
5110 size_t len;
5111 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5112
5113 if (data)
5114 ipa_prop_read_section (file_data, data, len);
5115 }
5116 }
5117
5118 /* After merging units, we can get mismatches in argument counts.
5119 Also, decl merging might have rendered parameter lists obsolete.
5120 Additionally, compute called_with_variable_arg info. */
5121
5122 void
5123 ipa_update_after_lto_read (void)
5124 {
5125 ipa_check_create_node_params ();
5126 ipa_check_create_edge_args ();
5127 }
5128
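/* Stream out the aggregate value replacement chain for NODE to OB. */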
5129 void
5130 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
5131 {
5132 int node_ref;
5133 unsigned int count = 0;
5134 lto_symtab_encoder_t encoder;
5135 struct ipa_agg_replacement_value *aggvals, *av;
5136
5137 aggvals = ipa_get_agg_replacements_for_node (node);
5138 encoder = ob->decl_state->symtab_node_encoder;
5139 node_ref = lto_symtab_encoder_encode (encoder, node);
5140 streamer_write_uhwi (ob, node_ref);
5141
5142 for (av = aggvals; av; av = av->next)
5143 count++;
5144 streamer_write_uhwi (ob, count);
5145
5146 for (av = aggvals; av; av = av->next)
5147 {
5148 struct bitpack_d bp;
5149
5150 streamer_write_uhwi (ob, av->offset);
5151 streamer_write_uhwi (ob, av->index);
5152 stream_write_tree (ob, av->value, true);
5153
5154 bp = bitpack_create (ob->main_stream);
5155 bp_pack_value (&bp, av->by_ref, 1);
5156 streamer_write_bitpack (&bp);
5157 }
5158 }
5159
5160 /* Stream in the aggregate value replacement chain for NODE from IB. */
5161
5162 static void
5163 read_agg_replacement_chain (struct lto_input_block *ib,
5164 struct cgraph_node *node,
5165 struct data_in *data_in)
5166 {
5167 struct ipa_agg_replacement_value *aggvals = NULL;
5168 unsigned int count, i;
5169
5170 count = streamer_read_uhwi (ib);
5171 for (i = 0; i < count; i++)
5172 {
5173 struct ipa_agg_replacement_value *av;
5174 struct bitpack_d bp;
5175
5176 av = ggc_alloc<ipa_agg_replacement_value> ();
5177 av->offset = streamer_read_uhwi (ib);
5178 av->index = streamer_read_uhwi (ib);
5179 av->value = stream_read_tree (ib, data_in);
5180 bp = streamer_read_bitpack (ib);
5181 av->by_ref = bp_unpack_value (&bp, 1);
5182 av->next = aggvals;
5183 aggvals = av;
5184 }
5185 ipa_set_node_agg_value_chain (node, aggvals);
5186 }
5187
5188 /* Write all aggregate replacements for nodes in the current LTO partition. */
5189
5190 void
5191 ipa_prop_write_all_agg_replacement (void)
5192 {
5193 struct cgraph_node *node;
5194 struct output_block *ob;
5195 unsigned int count = 0;
5196 lto_symtab_encoder_iterator lsei;
5197 lto_symtab_encoder_t encoder;
5198
5199 if (!ipa_node_agg_replacements)
5200 return;
5201
5202 ob = create_output_block (LTO_section_ipcp_transform);
5203 encoder = ob->decl_state->symtab_node_encoder;
5204 ob->symbol = NULL;
5205 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5206 lsei_next_function_in_partition (&lsei))
5207 {
5208 node = lsei_cgraph_node (lsei);
5209 if (node->has_gimple_body_p ()
5210 && ipa_get_agg_replacements_for_node (node) != NULL)
5211 count++;
5212 }
5213
5214 streamer_write_uhwi (ob, count);
5215
5216 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5217 lsei_next_function_in_partition (&lsei))
5218 {
5219 node = lsei_cgraph_node (lsei);
5220 if (node->has_gimple_body_p ()
5221 && ipa_get_agg_replacements_for_node (node) != NULL)
5222 write_agg_replacement_chain (ob, node);
5223 }
5224 streamer_write_char_stream (ob->main_stream, 0);
5225 produce_asm (ob, NULL);
5226 destroy_output_block (ob);
5227 }
5228
5229 /* Read replacements section in file FILE_DATA of length LEN with data
5230 DATA. */
5231
5232 static void
5233 read_replacements_section (struct lto_file_decl_data *file_data,
5234 const char *data,
5235 size_t len)
5236 {
5237 const struct lto_function_header *header =
5238 (const struct lto_function_header *) data;
5239 const int cfg_offset = sizeof (struct lto_function_header);
5240 const int main_offset = cfg_offset + header->cfg_size;
5241 const int string_offset = main_offset + header->main_size;
5242 struct data_in *data_in;
5243 unsigned int i;
5244 unsigned int count;
5245
5246 lto_input_block ib_main ((const char *) data + main_offset,
5247 header->main_size);
5248
5249 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5250 header->string_size, vNULL);
5251 count = streamer_read_uhwi (&ib_main);
5252
5253 for (i = 0; i < count; i++)
5254 {
5255 unsigned int index;
5256 struct cgraph_node *node;
5257 lto_symtab_encoder_t encoder;
5258
5259 index = streamer_read_uhwi (&ib_main);
5260 encoder = file_data->symtab_node_encoder;
5261 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5262 index));
5263 gcc_assert (node->definition);
5264 read_agg_replacement_chain (&ib_main, node, data_in);
5265 }
5266 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5267 len);
5268 lto_data_in_delete (data_in);
5269 }
5270
5271 /* Read IPA-CP aggregate replacements. */
5272
5273 void
5274 ipa_prop_read_all_agg_replacement (void)
5275 {
5276 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5277 struct lto_file_decl_data *file_data;
5278 unsigned int j = 0;
5279
5280 while ((file_data = file_data_vec[j++]))
5281 {
5282 size_t len;
5283 const char *data = lto_get_section_data (file_data,
5284 LTO_section_ipcp_transform,
5285 NULL, &len);
5286 if (data)
5287 read_replacements_section (file_data, data, len);
5288 }
5289 }
5290
5291 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5292 NODE. */
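/* For illustration (hypothetical numbers): with four original parameters
and combined_args_to_skip containing just bit 1, the mapping computed
below is adj = {0, -1, 1, 2}, so a replacement for original parameter 3
is renumbered to 2 in the clone. */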
5293
5294 static void
5295 adjust_agg_replacement_values (struct cgraph_node *node,
5296 struct ipa_agg_replacement_value *aggval)
5297 {
5298 struct ipa_agg_replacement_value *v;
5299 int i, c = 0, d = 0, *adj;
5300
5301 if (!node->clone.combined_args_to_skip)
5302 return;
5303
5304 for (v = aggval; v; v = v->next)
5305 {
5306 gcc_assert (v->index >= 0);
5307 if (c < v->index)
5308 c = v->index;
5309 }
5310 c++;
5311
5312 adj = XALLOCAVEC (int, c);
5313 for (i = 0; i < c; i++)
5314 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5315 {
5316 adj[i] = -1;
5317 d++;
5318 }
5319 else
5320 adj[i] = i - d;
5321
5322 for (v = aggval; v; v = v->next)
5323 v->index = adj[v->index];
5324 }
5325
5326 /* Dominator walker driving the ipcp modification phase. */
5327
5328 class ipcp_modif_dom_walker : public dom_walker
5329 {
5330 public:
5331 ipcp_modif_dom_walker (struct func_body_info *fbi,
5332 vec<ipa_param_descriptor> descs,
5333 struct ipa_agg_replacement_value *av,
5334 bool *sc, bool *cc)
5335 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5336 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5337
5338 virtual void before_dom_children (basic_block);
5339
5340 private:
5341 struct func_body_info *m_fbi;
5342 vec<ipa_param_descriptor> m_descriptors;
5343 struct ipa_agg_replacement_value *m_aggval;
5344 bool *m_something_changed, *m_cfg_changed;
5345 };
5346
5347 void
5348 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5349 {
5350 gimple_stmt_iterator gsi;
5351 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5352 {
5353 struct ipa_agg_replacement_value *v;
5354 gimple stmt = gsi_stmt (gsi);
5355 tree rhs, val, t;
5356 HOST_WIDE_INT offset, size;
5357 int index;
5358 bool by_ref, vce;
5359
5360 if (!gimple_assign_load_p (stmt))
5361 continue;
5362 rhs = gimple_assign_rhs1 (stmt);
5363 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5364 continue;
5365
5366 vce = false;
5367 t = rhs;
5368 while (handled_component_p (t))
5369 {
5370 /* V_C_E can do things like convert an array of integers to one
5371 bigger integer and similar things we do not handle below. */
5372 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5373 {
5374 vce = true;
5375 break;
5376 }
5377 t = TREE_OPERAND (t, 0);
5378 }
5379 if (vce)
5380 continue;
5381
5382 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5383 &offset, &size, &by_ref))
5384 continue;
5385 for (v = m_aggval; v; v = v->next)
5386 if (v->index == index
5387 && v->offset == offset)
5388 break;
5389 if (!v
5390 || v->by_ref != by_ref
5391 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5392 continue;
5393
5394 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5395 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5396 {
5397 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5398 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5399 else if (TYPE_SIZE (TREE_TYPE (rhs))
5400 == TYPE_SIZE (TREE_TYPE (v->value)))
5401 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5402 else
5403 {
5404 if (dump_file)
5405 {
5406 fprintf (dump_file, " const ");
5407 print_generic_expr (dump_file, v->value, 0);
5408 fprintf (dump_file, " can't be converted to type of ");
5409 print_generic_expr (dump_file, rhs, 0);
5410 fprintf (dump_file, "\n");
5411 }
5412 continue;
5413 }
5414 }
5415 else
5416 val = v->value;
5417
5418 if (dump_file && (dump_flags & TDF_DETAILS))
5419 {
5420 fprintf (dump_file, "Modifying stmt:\n ");
5421 print_gimple_stmt (dump_file, stmt, 0, 0);
5422 }
5423 gimple_assign_set_rhs_from_tree (&gsi, val);
5424 update_stmt (stmt);
5425
5426 if (dump_file && (dump_flags & TDF_DETAILS))
5427 {
5428 fprintf (dump_file, "into:\n ");
5429 print_gimple_stmt (dump_file, stmt, 0, 0);
5430 fprintf (dump_file, "\n");
5431 }
5432
5433 *m_something_changed = true;
5434 if (maybe_clean_eh_stmt (stmt)
5435 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5436 *m_cfg_changed = true;
5437 }
5438
5439 }
5440
5441 /* IPCP transformation phase doing propagation of aggregate values. */
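/* For example (hypothetical values): if the replacement chain records
index 0, offset 0, by_ref true, value 4 for pointer parameter p, a load
such as tmp_1 = p->x with matching offset, size and by_ref found by
ipcp_modif_dom_walker is rewritten to tmp_1 = 4. */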
5442
5443 unsigned int
5444 ipcp_transform_function (struct cgraph_node *node)
5445 {
5446 vec<ipa_param_descriptor> descriptors = vNULL;
5447 struct func_body_info fbi;
5448 struct ipa_agg_replacement_value *aggval;
5449 int param_count;
5450 bool cfg_changed = false, something_changed = false;
5451
5452 gcc_checking_assert (cfun);
5453 gcc_checking_assert (current_function_decl);
5454
5455 if (dump_file)
5456 fprintf (dump_file, "Modification phase of node %s/%i\n",
5457 node->name (), node->order);
5458
5459 aggval = ipa_get_agg_replacements_for_node (node);
5460 if (!aggval)
5461 return 0;
5462 param_count = count_formal_params (node->decl);
5463 if (param_count == 0)
5464 return 0;
5465 adjust_agg_replacement_values (node, aggval);
5466 if (dump_file)
5467 ipa_dump_agg_replacement_values (dump_file, aggval);
5468
5469 fbi.node = node;
5470 fbi.info = NULL;
5471 fbi.bb_infos = vNULL;
5472 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5473 fbi.param_count = param_count;
5474 fbi.aa_walked = 0;
5475
5476 descriptors.safe_grow_cleared (param_count);
5477 ipa_populate_param_decls (node, descriptors);
5478 calculate_dominance_info (CDI_DOMINATORS);
5479 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5480 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5481
5482 int i;
5483 struct ipa_bb_info *bi;
5484 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5485 free_ipa_bb_info (bi);
5486 fbi.bb_infos.release ();
5487 free_dominance_info (CDI_DOMINATORS);
5488 (*ipa_node_agg_replacements)[node->uid] = NULL;
5489 descriptors.release ();
5490
5491 if (!something_changed)
5492 return 0;
5493 else if (cfg_changed)
5494 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5495 else
5496 return TODO_update_ssa_only_virtuals;
5497 }