[gcc.git] / gcc / ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
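
/* Illustrative example (not part of the original source): given a definition
   such as

     __attribute__ ((optimize (0)))
     int frozen (int x) { return x; }

   the per-function optimization settings have x_optimize equal to zero, so
   the predicate above returns true and the function is skipped by IPA-CP
   analysis.  */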

/* Return index of the formal whose tree is PTREE in the function whose formal
   parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Print to FILE the index I and, if available, the declaration of the Ith
   formal parameter of the function corresponding to INFO.  Note there is no
   setter function as the descriptor array is built just once using
   ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}
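
/* As an illustration (not in the original source), output produced by the
   function above for one call site could look like

     param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
     param 1: CONST: 4
      Aggregate passed by reference:
      offset: 0, cst: 1

   with the exact layout following the fprintf format strings used above.  */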

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that the
     types will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  /* unshare_expr_without_location both unshares CONSTANT and strips all its
     locations, so a separate unshare_expr/SET_EXPR_LOCATION pair is not
     needed beforehand.  */
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}
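
/* Illustrative example (not from the original source): a simple pass-through
   jump function describes a call such as

     void foo (int a)
     {
       bar (a);
     }

   where the first argument of bar is formal parameter 0 of foo, passed on
   unchanged.  */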

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
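
/* Illustrative example (not from the original source): an arithmetic
   pass-through describes a call such as

     void foo (int a)
     {
       bar (a + 4);
     }

   recorded with formal_id 0, operation PLUS_EXPR and operand 4.  */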

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
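
/* Illustrative example (not from the original source): an ancestor jump
   function describes passing the address of a subobject of a formal
   parameter, e.g.

     struct A { int x; };
     struct B { int pad; struct A a; };

     void foo (struct B *b)
     {
       bar (&b->a);
     }

   recorded with formal_id 0 and the offset of field 'a' within 'B',
   expressed in bits.  */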

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
          || !TYPE_BINFO (jfunc->value.known_type.component_type)
          || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
        return base_binfo;
      return NULL_TREE;
    }
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
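
/* Illustrative note (not from the original source): a store that the
   predicate above considers a potential VMT pointer store typically looks,
   in simplified GIMPLE emitted in a constructor, like

     this_1->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment to a COMPONENT_REF whose FIELD_DECL has DECL_VIRTUAL_P
   set.  */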

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL_TREE;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the invocation of
   CALL.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}
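
/* Illustrative example (not from the original source): in

     struct A { virtual int f (); };

     int use (A *a)
     {
       ext ();
       return a->f ();
     }

   the dynamic type of *a is considered stable between the entry of use and
   the virtual call (callees may not destroy an instance passed to them by
   pointer), whereas within an inlined constructor or destructor of A it
   could legitimately change.  */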

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
         is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Sets the boolean variable pointed to by
   DATA to true, flagging that a clobbering definition has been encountered.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
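
/* Illustrative example (not from the original source): in

     int foo (int a)
     {
       take_address (&a);
       return a;
     }

   the walk over virtual definitions between the load of 'a' and the
   function entry encounters the call that may store to 'a' through the
   escaped pointer, so the predicate above returns false for that load.  */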

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (but FBI can
   be NULL), STMT is the load statement.  If the function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, the
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the func_body_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
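
/* Illustrative example (not from the original source): for

     struct S { int x; };

     int foo (struct S *p)
     {
       return p->x;
     }

   the load p->x satisfies the conditions above with *INDEX_P set to 0,
   *OFFSET_P to the bit offset of 'x' within S (here 0) and *BY_REF_P to
   true, provided nothing between function entry and the load may clobber
   *p.  */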

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

      B::foo() (struct B * const this)
      {
        struct A * D.1845;

        D.1845_2 = &this_1(D)->D.1748;
        A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  FBI contains the information that is only
   needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
                         index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
                                    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
         we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
          || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
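
/* Illustrative note (not from the original source): a C++
   pointer-to-member-function is typically lowered to a record like

     struct
     {
       void (*__pfn) (void);
       long __delta;
     };

   i.e. a method pointer followed by an integral this-adjustment, which is
   the two-field shape the predicate above looks for.  */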

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
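
/* Illustrative example (not from the original source): given the GIMPLE
   sequence

     b_2 = 7;
     c_3 = b_2;

   calling the function above on c_3 follows the copy chain through b_2 and
   returns the constant 7.  */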

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is not known.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}
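
/* Illustrative example (not from the original source): with existing list
   entries at (offset 0, size 32) and (offset 64, size 32), in bits:

     - LHS_OFFSET 64, LHS_SIZE 32 matches the second entry exactly and
       sets *ALREADY_THERE;
     - LHS_OFFSET 16, LHS_SIZE 32 partially overlaps the first entry, so
       NULL is returned;
     - LHS_OFFSET 32, LHS_SIZE 32 yields the link between the two entries,
       where a new element can be inserted.  */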
1647
1648 /* Build aggregate jump function from LIST, assuming there are exactly
1649 CONST_COUNT constant entries there and that th offset of the passed argument
1650 is ARG_OFFSET and store it into JFUNC. */
1651
1652 static void
1653 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1654 int const_count, HOST_WIDE_INT arg_offset,
1655 struct ipa_jump_func *jfunc)
1656 {
1657 vec_alloc (jfunc->agg.items, const_count);
1658 while (list)
1659 {
1660 if (list->constant)
1661 {
1662 struct ipa_agg_jf_item item;
1663 item.offset = list->offset - arg_offset;
1664 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1665 item.value = unshare_expr_without_location (list->constant);
1666 jfunc->agg.items->quick_push (item);
1667 }
1668 list = list->next;
1669 }
1670 }
1671
1672 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1673 in ARG is filled in with constant values. ARG can either be an aggregate
1674 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1675 aggregate. JFUNC is the jump function into which the constants are
1676 subsequently stored. */
1677
1678 static void
1679 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1680 struct ipa_jump_func *jfunc)
1681 {
1682 struct ipa_known_agg_contents_list *list = NULL;
1683 int item_count = 0, const_count = 0;
1684 HOST_WIDE_INT arg_offset, arg_size;
1685 gimple_stmt_iterator gsi;
1686 tree arg_base;
1687 bool check_ref, by_ref;
1688 ao_ref r;
1689
1690 /* The function operates in three stages. First, we prepare check_ref, r,
1691 arg_base and arg_offset based on what is actually passed as an actual
1692 argument. */
1693
1694 if (POINTER_TYPE_P (arg_type))
1695 {
1696 by_ref = true;
1697 if (TREE_CODE (arg) == SSA_NAME)
1698 {
1699 tree type_size;
1700 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1701 return;
1702 check_ref = true;
1703 arg_base = arg;
1704 arg_offset = 0;
1705 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1706 arg_size = tree_to_uhwi (type_size);
1707 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1708 }
1709 else if (TREE_CODE (arg) == ADDR_EXPR)
1710 {
1711 HOST_WIDE_INT arg_max_size;
1712
1713 arg = TREE_OPERAND (arg, 0);
1714 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1715 &arg_max_size);
1716 if (arg_max_size == -1
1717 || arg_max_size != arg_size
1718 || arg_offset < 0)
1719 return;
1720 if (DECL_P (arg_base))
1721 {
1722 check_ref = false;
1723 ao_ref_init (&r, arg_base);
1724 }
1725 else
1726 return;
1727 }
1728 else
1729 return;
1730 }
1731 else
1732 {
1733 HOST_WIDE_INT arg_max_size;
1734
1735 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1736
1737 by_ref = false;
1738 check_ref = false;
1739 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1740 &arg_max_size);
1741 if (arg_max_size == -1
1742 || arg_max_size != arg_size
1743 || arg_offset < 0)
1744 return;
1745
1746 ao_ref_init (&r, arg);
1747 }
1748
1749 /* The second stage walks the BB backwards, looks at individual statements
1750 and, as long as it is confident of how the statements affect contents of
1751 the aggregates, builds a sorted linked list of ipa_known_agg_contents_list
1752 structures describing it. */
1753 gsi = gsi_for_stmt (call);
1754 gsi_prev (&gsi);
1755 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1756 {
1757 struct ipa_known_agg_contents_list *n, **p;
1758 gimple stmt = gsi_stmt (gsi);
1759 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1760 tree lhs, rhs, lhs_base;
1761
1762 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1763 continue;
1764 if (!gimple_assign_single_p (stmt))
1765 break;
1766
1767 lhs = gimple_assign_lhs (stmt);
1768 rhs = gimple_assign_rhs1 (stmt);
1769 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1770 || TREE_CODE (lhs) == BIT_FIELD_REF
1771 || contains_bitfld_component_ref_p (lhs))
1772 break;
1773
1774 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1775 &lhs_max_size);
1776 if (lhs_max_size == -1
1777 || lhs_max_size != lhs_size)
1778 break;
1779
1780 if (check_ref)
1781 {
1782 if (TREE_CODE (lhs_base) != MEM_REF
1783 || TREE_OPERAND (lhs_base, 0) != arg_base
1784 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1785 break;
1786 }
1787 else if (lhs_base != arg_base)
1788 {
1789 if (DECL_P (lhs_base))
1790 continue;
1791 else
1792 break;
1793 }
1794
1795 bool already_there = false;
1796 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1797 &already_there);
1798 if (!p)
1799 break;
1800 if (already_there)
1801 continue;
1802
1803 rhs = get_ssa_def_if_simple_copy (rhs);
1804 n = XALLOCA (struct ipa_known_agg_contents_list);
1805 n->size = lhs_size;
1806 n->offset = lhs_offset;
1807 if (is_gimple_ip_invariant (rhs))
1808 {
1809 n->constant = rhs;
1810 const_count++;
1811 }
1812 else
1813 n->constant = NULL_TREE;
1814 n->next = *p;
1815 *p = n;
1816
1817 item_count++;
1818 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1819 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1820 break;
1821 }
1822
1823 /* The third stage just goes over the list and creates an appropriate vector
1824 of ipa_agg_jf_item structures out of it, of course only if there are
1825 any known constants to begin with. */
1826
1827 if (const_count)
1828 {
1829 jfunc->agg.by_ref = by_ref;
1830 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1831 }
1832 }
1833
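/* Return the type of the I-th formal parameter of the callee of edge E,
   looking first at the function type and, failing that (e.g. for K&R
   style declarations), at the parameter declarations themselves; return
   NULL if the type cannot be determined.  For instance, for a callee
   declared as "void f (int a, const char *b)", asking for parameter 1
   yields "const char *" (illustrative example).  */
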
1834 static tree
1835 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1836 {
1837 int n;
1838 tree type = (e->callee
1839 ? TREE_TYPE (e->callee->decl)
1840 : gimple_call_fntype (e->call_stmt));
1841 tree t = TYPE_ARG_TYPES (type);
1842
1843 for (n = 0; n < i; n++)
1844 {
1845 if (!t)
1846 break;
1847 t = TREE_CHAIN (t);
1848 }
1849 if (t)
1850 return TREE_VALUE (t);
1851 if (!e->callee)
1852 return NULL;
1853 t = DECL_ARGUMENTS (e->callee->decl);
1854 for (n = 0; n < i; n++)
1855 {
1856 if (!t)
1857 return NULL;
1858 t = TREE_CHAIN (t);
1859 }
1860 if (t)
1861 return TREE_TYPE (t);
1862 return NULL;
1863 }
1864
1865 /* Compute jump functions for all arguments of callsite CS and insert the
1866 information in the jump_functions array in the ipa_edge_args corresponding
1867 to this callsite. */
1868
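/* For instance (an illustrative call only), for "foo (7, x, x->next)"
   the first argument gets a constant jump function, the second
   typically a simple pass-through if X is itself a formal parameter of
   the caller, and the third a jump function derived from the defining
   statement of the SSA name, or it may remain unknown.  */
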
1869 static void
1870 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1871 struct cgraph_edge *cs)
1872 {
1873 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1874 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1875 gimple call = cs->call_stmt;
1876 int n, arg_num = gimple_call_num_args (call);
1877
1878 if (arg_num == 0 || args->jump_functions)
1879 return;
1880 vec_safe_grow_cleared (args->jump_functions, arg_num);
1881
1882 if (gimple_call_internal_p (call))
1883 return;
1884 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1885 return;
1886
1887 for (n = 0; n < arg_num; n++)
1888 {
1889 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1890 tree arg = gimple_call_arg (call, n);
1891 tree param_type = ipa_get_callee_param_type (cs, n);
1892
1893 if (is_gimple_ip_invariant (arg))
1894 ipa_set_jf_constant (jfunc, arg, cs);
1895 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1896 && TREE_CODE (arg) == PARM_DECL)
1897 {
1898 int index = ipa_get_param_decl_index (info, arg);
1899
1900 gcc_assert (index >= 0);
1901 /* Aggregate passed by value, check for pass-through, otherwise we
1902 will attempt to fill in aggregate contents later in this
1903 for loop. */
1904 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1905 {
1906 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1907 continue;
1908 }
1909 }
1910 else if (TREE_CODE (arg) == SSA_NAME)
1911 {
1912 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1913 {
1914 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1915 if (index >= 0)
1916 {
1917 bool agg_p, type_p;
1918 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1919 if (param_type && POINTER_TYPE_P (param_type))
1920 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1921 call, jfunc);
1922 else
1923 type_p = false;
1924 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1925 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1926 type_p);
1927 }
1928 }
1929 else
1930 {
1931 gimple stmt = SSA_NAME_DEF_STMT (arg);
1932 if (is_gimple_assign (stmt))
1933 compute_complex_assign_jump_func (fbi, info, jfunc,
1934 call, stmt, arg, param_type);
1935 else if (gimple_code (stmt) == GIMPLE_PHI)
1936 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1937 call, stmt, param_type);
1938 }
1939 }
1940 else
1941 compute_known_type_jump_func (arg, jfunc, call,
1942 param_type
1943 && POINTER_TYPE_P (param_type)
1944 ? TREE_TYPE (param_type)
1945 : NULL);
1946
1947 /* If ARG is a pointer, we cannot use its type to determine the type of the
1948 aggregate passed (because type conversions are ignored in gimple). Usually
1949 we can safely get the type from the function declaration, but in the case
1950 of K&R prototypes or variadic functions we can try our luck with the type
1951 of the pointer passed. TODO: Since we look for actual initialization of the
1952 memory object, we might do better to work out the type from the stores we find. */
1953 if (!param_type)
1954 param_type = TREE_TYPE (arg);
1955
1956 if ((jfunc->type != IPA_JF_PASS_THROUGH
1957 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1958 && (jfunc->type != IPA_JF_ANCESTOR
1959 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1960 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1961 || POINTER_TYPE_P (param_type)))
1962 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1963 }
1964 }
1965
1966 /* Compute jump functions for all edges - both direct and indirect - outgoing
1967 from BB. */
1968
1969 static void
1970 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1971 {
1972 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1973 int i;
1974 struct cgraph_edge *cs;
1975
1976 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1977 {
1978 struct cgraph_node *callee = cs->callee;
1979
1980 if (callee)
1981 {
1982 callee->ultimate_alias_target ();
1983 /* We do not need to bother analyzing calls to unknown functions
1984 unless they may become known during lto/whopr. */
1985 if (!callee->definition && !flag_lto)
1986 continue;
1987 }
1988 ipa_compute_jump_functions_for_edge (fbi, cs);
1989 }
1990 }
1991
1992 /* If STMT looks like a statement loading a value from a member pointer formal
1993 parameter, return that parameter and store the offset of the field to
1994 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1995 might be clobbered). If USE_DELTA, then we look for a use of the delta
1996 field rather than the pfn. */
1997
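/* Under the common representation of pointers to member functions
   (cf. the Itanium C++ ABI), such a value is lowered into a record of
   roughly this shape (illustrative only, field order and the placement
   of the virtual bit depend on the target):

     struct ptrmemfunc
     {
       ptrdiff_t __pfn;    -- the function, or an offset into the vtable
       ptrdiff_t __delta;  -- adjustment to apply to the this pointer
     };

   The function below matches loads from one of these two fields.  */
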
1998 static tree
1999 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2000 HOST_WIDE_INT *offset_p)
2001 {
2002 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2003
2004 if (!gimple_assign_single_p (stmt))
2005 return NULL_TREE;
2006
2007 rhs = gimple_assign_rhs1 (stmt);
2008 if (TREE_CODE (rhs) == COMPONENT_REF)
2009 {
2010 ref_field = TREE_OPERAND (rhs, 1);
2011 rhs = TREE_OPERAND (rhs, 0);
2012 }
2013 else
2014 ref_field = NULL_TREE;
2015 if (TREE_CODE (rhs) != MEM_REF)
2016 return NULL_TREE;
2017 rec = TREE_OPERAND (rhs, 0);
2018 if (TREE_CODE (rec) != ADDR_EXPR)
2019 return NULL_TREE;
2020 rec = TREE_OPERAND (rec, 0);
2021 if (TREE_CODE (rec) != PARM_DECL
2022 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2023 return NULL_TREE;
2024 ref_offset = TREE_OPERAND (rhs, 1);
2025
2026 if (use_delta)
2027 fld = delta_field;
2028 else
2029 fld = ptr_field;
2030 if (offset_p)
2031 *offset_p = int_bit_position (fld);
2032
2033 if (ref_field)
2034 {
2035 if (integer_nonzerop (ref_offset))
2036 return NULL_TREE;
2037 return ref_field == fld ? rec : NULL_TREE;
2038 }
2039 else
2040 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2041 : NULL_TREE;
2042 }
2043
2044 /* Returns true iff T is an SSA_NAME defined by a statement. */
2045
2046 static bool
2047 ipa_is_ssa_with_stmt_def (tree t)
2048 {
2049 if (TREE_CODE (t) == SSA_NAME
2050 && !SSA_NAME_IS_DEFAULT_DEF (t))
2051 return true;
2052 else
2053 return false;
2054 }
2055
2056 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2057 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2058 indirect call graph edge. */
2059
2060 static struct cgraph_edge *
2061 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2062 {
2063 struct cgraph_edge *cs;
2064
2065 cs = node->get_edge (stmt);
2066 cs->indirect_info->param_index = param_index;
2067 cs->indirect_info->agg_contents = 0;
2068 cs->indirect_info->member_ptr = 0;
2069 return cs;
2070 }
2071
2072 /* Analyze the CALL and examine uses of formal parameters of the caller
2073 FBI->node (described by FBI->info), using the intermediate information
2074 about each formal parameter that FBI holds. Currently it checks
2075 whether the call calls a pointer that is a formal parameter and if so, the
2076 parameter is marked with the called flag and an indirect call graph edge
2077 describing the call is created. This is very simple for ordinary pointers
2078 represented in SSA but not-so-nice when it comes to member pointers. The
2079 ugly part of this function does nothing more than trying to match the
2080 pattern of such a call. An example of such a pattern is the gimple dump
2081 below, the call is on the last line:
2082
2083 <bb 2>:
2084 f$__delta_5 = f.__delta;
2085 f$__pfn_24 = f.__pfn;
2086
2087 or
2088 <bb 2>:
2089 f$__delta_5 = MEM[(struct *)&f];
2090 f$__pfn_24 = MEM[(struct *)&f + 4B];
2091
2092 and a few lines below:
2093
2094 <bb 5>
2095 D.2496_3 = (int) f$__pfn_24;
2096 D.2497_4 = D.2496_3 & 1;
2097 if (D.2497_4 != 0)
2098 goto <bb 3>;
2099 else
2100 goto <bb 4>;
2101
2102 <bb 6>:
2103 D.2500_7 = (unsigned int) f$__delta_5;
2104 D.2501_8 = &S + D.2500_7;
2105 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2106 D.2503_10 = *D.2502_9;
2107 D.2504_12 = f$__pfn_24 + -1;
2108 D.2505_13 = (unsigned int) D.2504_12;
2109 D.2506_14 = D.2503_10 + D.2505_13;
2110 D.2507_15 = *D.2506_14;
2111 iftmp.11_16 = (String:: *) D.2507_15;
2112
2113 <bb 7>:
2114 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2115 D.2500_19 = (unsigned int) f$__delta_5;
2116 D.2508_20 = &S + D.2500_19;
2117 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2118
2119 Such patterns are results of simple calls to a member pointer:
2120
2121 int doprinting (int (MyString::* f)(int) const)
2122 {
2123 MyString S ("somestring");
2124
2125 return (S.*f)(4);
2126 }
2127
2128 Moreover, the function also looks for called pointers loaded from aggregates
2129 passed by value or reference. */
2130
2131 static void
2132 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2133 tree target)
2134 {
2135 struct ipa_node_params *info = fbi->info;
2136 HOST_WIDE_INT offset;
2137 bool by_ref;
2138
2139 if (SSA_NAME_IS_DEFAULT_DEF (target))
2140 {
2141 tree var = SSA_NAME_VAR (target);
2142 int index = ipa_get_param_decl_index (info, var);
2143 if (index >= 0)
2144 ipa_note_param_call (fbi->node, index, call);
2145 return;
2146 }
2147
2148 int index;
2149 gimple def = SSA_NAME_DEF_STMT (target);
2150 if (gimple_assign_single_p (def)
2151 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2152 gimple_assign_rhs1 (def), &index, &offset,
2153 NULL, &by_ref))
2154 {
2155 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2156 if (cs->indirect_info->offset != offset)
2157 cs->indirect_info->outer_type = NULL;
2158 cs->indirect_info->offset = offset;
2159 cs->indirect_info->agg_contents = 1;
2160 cs->indirect_info->by_ref = by_ref;
2161 return;
2162 }
2163
2164 /* Now we need to try to match the complex pattern of calling a member
2165 pointer. */
2166 if (gimple_code (def) != GIMPLE_PHI
2167 || gimple_phi_num_args (def) != 2
2168 || !POINTER_TYPE_P (TREE_TYPE (target))
2169 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2170 return;
2171
2172 /* First, we need to check whether one of these is a load from a member
2173 pointer that is a parameter to this function. */
2174 tree n1 = PHI_ARG_DEF (def, 0);
2175 tree n2 = PHI_ARG_DEF (def, 1);
2176 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2177 return;
2178 gimple d1 = SSA_NAME_DEF_STMT (n1);
2179 gimple d2 = SSA_NAME_DEF_STMT (n2);
2180
2181 tree rec;
2182 basic_block bb, virt_bb;
2183 basic_block join = gimple_bb (def);
2184 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2185 {
2186 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2187 return;
2188
2189 bb = EDGE_PRED (join, 0)->src;
2190 virt_bb = gimple_bb (d2);
2191 }
2192 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2193 {
2194 bb = EDGE_PRED (join, 1)->src;
2195 virt_bb = gimple_bb (d1);
2196 }
2197 else
2198 return;
2199
2200 /* Second, we need to check that the basic blocks are laid out in the way
2201 corresponding to the pattern. */
2202
2203 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2204 || single_pred (virt_bb) != bb
2205 || single_succ (virt_bb) != join)
2206 return;
2207
2208 /* Third, let's see that the branching is done depending on the least
2209 significant bit of the pfn. */
2210
2211 gimple branch = last_stmt (bb);
2212 if (!branch || gimple_code (branch) != GIMPLE_COND)
2213 return;
2214
2215 if ((gimple_cond_code (branch) != NE_EXPR
2216 && gimple_cond_code (branch) != EQ_EXPR)
2217 || !integer_zerop (gimple_cond_rhs (branch)))
2218 return;
2219
2220 tree cond = gimple_cond_lhs (branch);
2221 if (!ipa_is_ssa_with_stmt_def (cond))
2222 return;
2223
2224 def = SSA_NAME_DEF_STMT (cond);
2225 if (!is_gimple_assign (def)
2226 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2227 || !integer_onep (gimple_assign_rhs2 (def)))
2228 return;
2229
2230 cond = gimple_assign_rhs1 (def);
2231 if (!ipa_is_ssa_with_stmt_def (cond))
2232 return;
2233
2234 def = SSA_NAME_DEF_STMT (cond);
2235
2236 if (is_gimple_assign (def)
2237 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2238 {
2239 cond = gimple_assign_rhs1 (def);
2240 if (!ipa_is_ssa_with_stmt_def (cond))
2241 return;
2242 def = SSA_NAME_DEF_STMT (cond);
2243 }
2244
2245 tree rec2;
2246 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2247 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2248 == ptrmemfunc_vbit_in_delta),
2249 NULL);
2250 if (rec != rec2)
2251 return;
2252
2253 index = ipa_get_param_decl_index (info, rec);
2254 if (index >= 0
2255 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2256 {
2257 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2258 if (cs->indirect_info->offset != offset)
2259 cs->indirect_info->outer_type = NULL;
2260 cs->indirect_info->offset = offset;
2261 cs->indirect_info->agg_contents = 1;
2262 cs->indirect_info->member_ptr = 1;
2263 }
2264
2265 return;
2266 }
2267
2268 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2269 object referenced in the expression is a formal parameter of the caller
2270 FBI->node (described by FBI->info), create a call note for the
2271 statement. */
2272
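/* A virtual call typically appears in the gimple dumps in a form
   resembling (illustrative only)

     OBJ_TYPE_REF (_5; obj_2(D)->2) (obj_2(D));

   where the number after "->" is the token selecting a slot in the
   virtual table and obj_2(D) is the object on which the method is
   invoked.  */
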
2273 static void
2274 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2275 gimple call, tree target)
2276 {
2277 tree obj = OBJ_TYPE_REF_OBJECT (target);
2278 int index;
2279 HOST_WIDE_INT anc_offset;
2280
2281 if (!flag_devirtualize)
2282 return;
2283
2284 if (TREE_CODE (obj) != SSA_NAME)
2285 return;
2286
2287 struct ipa_node_params *info = fbi->info;
2288 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2289 {
2290 struct ipa_jump_func jfunc;
2291 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2292 return;
2293
2294 anc_offset = 0;
2295 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2296 gcc_assert (index >= 0);
2297 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2298 call, &jfunc))
2299 return;
2300 }
2301 else
2302 {
2303 struct ipa_jump_func jfunc;
2304 gimple stmt = SSA_NAME_DEF_STMT (obj);
2305 tree expr;
2306
2307 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2308 if (!expr)
2309 return;
2310 index = ipa_get_param_decl_index (info,
2311 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2312 gcc_assert (index >= 0);
2313 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2314 call, &jfunc, anc_offset))
2315 return;
2316 }
2317
2318 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2319 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2320 ii->offset = anc_offset;
2321 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2322 ii->otr_type = obj_type_ref_class (target);
2323 ii->polymorphic = 1;
2324 }
2325
2326 /* Analyze a call statement CALL for whether and how it utilizes formal
2327 parameters of the caller described by FBI, which also holds intermediate
2328 information about each formal parameter. */
2329
2330 static void
2331 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2332 {
2333 tree target = gimple_call_fn (call);
2334
2335 if (!target
2336 || (TREE_CODE (target) != SSA_NAME
2337 && !virtual_method_call_p (target)))
2338 return;
2339
2340 struct cgraph_edge *cs = fbi->node->get_edge (call);
2341 /* If we previously turned the call into a direct call, there is
2342 no need to analyze. */
2343 if (cs && !cs->indirect_unknown_callee)
2344 return;
2345
2346 if (cs->indirect_info->polymorphic)
2347 {
2348 tree otr_type;
2349 HOST_WIDE_INT otr_token;
2350 tree instance;
2351 tree target = gimple_call_fn (call);
2352 ipa_polymorphic_call_context context (current_function_decl,
2353 target, call, &instance);
2354
2355 otr_type = obj_type_ref_class (target);
2356 otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2357
2358 if (context.get_dynamic_type (instance,
2359 OBJ_TYPE_REF_OBJECT (target),
2360 otr_type, call))
2361 {
2362 gcc_assert (TREE_CODE (otr_type) == RECORD_TYPE);
2363 cs->indirect_info->polymorphic = true;
2364 cs->indirect_info->param_index = -1;
2365 cs->indirect_info->otr_token = otr_token;
2366 cs->indirect_info->otr_type = otr_type;
2367 cs->indirect_info->outer_type = context.outer_type;
2368 cs->indirect_info->speculative_outer_type = context.speculative_outer_type;
2369 cs->indirect_info->offset = context.offset;
2370 cs->indirect_info->speculative_offset = context.speculative_offset;
2371 cs->indirect_info->maybe_in_construction
2372 = context.maybe_in_construction;
2373 cs->indirect_info->maybe_derived_type = context.maybe_derived_type;
2374 cs->indirect_info->speculative_maybe_derived_type
2375 = context.speculative_maybe_derived_type;
2376 }
2377 }
2378
2379 if (TREE_CODE (target) == SSA_NAME)
2380 ipa_analyze_indirect_call_uses (fbi, call, target);
2381 else if (virtual_method_call_p (target))
2382 ipa_analyze_virtual_call_uses (fbi, call, target);
2383 }
2384
2385
2386 /* Analyze the call statement STMT with respect to formal parameters
2387 (described in FBI->info) of the caller given by FBI->node. Currently it
2388 only checks whether formal parameters are called. */
2389
2390 static void
2391 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2392 {
2393 if (is_gimple_call (stmt))
2394 ipa_analyze_call_uses (fbi, stmt);
2395 }
2396
2397 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2398 If OP is a parameter declaration, mark it as used in the info structure
2399 passed in DATA. */
2400
2401 static bool
2402 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2403 {
2404 struct ipa_node_params *info = (struct ipa_node_params *) data;
2405
2406 op = get_base_address (op);
2407 if (op
2408 && TREE_CODE (op) == PARM_DECL)
2409 {
2410 int index = ipa_get_param_decl_index (info, op);
2411 gcc_assert (index >= 0);
2412 ipa_set_param_used (info, index, true);
2413 }
2414
2415 return false;
2416 }
2417
2418 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2419 the findings in various structures of the associated ipa_node_params
2420 structure, such as parameter flags, notes etc. FBI holds various data about
2421 the function being analyzed. */
2422
2423 static void
2424 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2425 {
2426 gimple_stmt_iterator gsi;
2427 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2428 {
2429 gimple stmt = gsi_stmt (gsi);
2430
2431 if (is_gimple_debug (stmt))
2432 continue;
2433
2434 ipa_analyze_stmt_uses (fbi, stmt);
2435 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2436 visit_ref_for_mod_analysis,
2437 visit_ref_for_mod_analysis,
2438 visit_ref_for_mod_analysis);
2439 }
2440 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2441 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2442 visit_ref_for_mod_analysis,
2443 visit_ref_for_mod_analysis,
2444 visit_ref_for_mod_analysis);
2445 }
2446
2447 /* Calculate controlled uses of parameters of NODE. */
2448
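/* As an illustration (a hypothetical function):

     static int wrap (int (*cb) (int))
     {
       return cb (1) + helper (cb);
     }

   both uses of CB appear in call statements, one as the called pointer
   and one as an argument, so CB has two controlled uses; a use in, say,
   a comparison would instead force IPA_UNDESCRIBED_USE.  */
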
2449 static void
2450 ipa_analyze_controlled_uses (struct cgraph_node *node)
2451 {
2452 struct ipa_node_params *info = IPA_NODE_REF (node);
2453
2454 for (int i = 0; i < ipa_get_param_count (info); i++)
2455 {
2456 tree parm = ipa_get_param (info, i);
2457 int controlled_uses = 0;
2458
2459 /* For SSA regs see if parameter is used. For non-SSA we compute
2460 the flag during modification analysis. */
2461 if (is_gimple_reg (parm))
2462 {
2463 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2464 parm);
2465 if (ddef && !has_zero_uses (ddef))
2466 {
2467 imm_use_iterator imm_iter;
2468 use_operand_p use_p;
2469
2470 ipa_set_param_used (info, i, true);
2471 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2472 if (!is_gimple_call (USE_STMT (use_p)))
2473 {
2474 if (!is_gimple_debug (USE_STMT (use_p)))
2475 {
2476 controlled_uses = IPA_UNDESCRIBED_USE;
2477 break;
2478 }
2479 }
2480 else
2481 controlled_uses++;
2482 }
2483 else
2484 controlled_uses = 0;
2485 }
2486 else
2487 controlled_uses = IPA_UNDESCRIBED_USE;
2488 ipa_set_controlled_uses (info, i, controlled_uses);
2489 }
2490 }
2491
2492 /* Free stuff in BI. */
2493
2494 static void
2495 free_ipa_bb_info (struct ipa_bb_info *bi)
2496 {
2497 bi->cg_edges.release ();
2498 bi->param_aa_statuses.release ();
2499 }
2500
2501 /* Dominator walker driving the analysis. */
2502
2503 class analysis_dom_walker : public dom_walker
2504 {
2505 public:
2506 analysis_dom_walker (struct func_body_info *fbi)
2507 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2508
2509 virtual void before_dom_children (basic_block);
2510
2511 private:
2512 struct func_body_info *m_fbi;
2513 };
2514
2515 void
2516 analysis_dom_walker::before_dom_children (basic_block bb)
2517 {
2518 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2519 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2520 }
2521
2522 /* Initialize the array describing properties of formal parameters
2523 of NODE, analyze their uses and compute jump functions associated
2524 with actual arguments of calls from within NODE. */
2525
2526 void
2527 ipa_analyze_node (struct cgraph_node *node)
2528 {
2529 struct func_body_info fbi;
2530 struct ipa_node_params *info;
2531
2532 ipa_check_create_node_params ();
2533 ipa_check_create_edge_args ();
2534 info = IPA_NODE_REF (node);
2535
2536 if (info->analysis_done)
2537 return;
2538 info->analysis_done = 1;
2539
2540 if (ipa_func_spec_opts_forbid_analysis_p (node))
2541 {
2542 for (int i = 0; i < ipa_get_param_count (info); i++)
2543 {
2544 ipa_set_param_used (info, i, true);
2545 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2546 }
2547 return;
2548 }
2549
2550 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2551 push_cfun (func);
2552 calculate_dominance_info (CDI_DOMINATORS);
2553 ipa_initialize_node_params (node);
2554 ipa_analyze_controlled_uses (node);
2555
2556 fbi.node = node;
2557 fbi.info = IPA_NODE_REF (node);
2558 fbi.bb_infos = vNULL;
2559 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2560 fbi.param_count = ipa_get_param_count (info);
2561 fbi.aa_walked = 0;
2562
2563 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2564 {
2565 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2566 bi->cg_edges.safe_push (cs);
2567 }
2568
2569 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2570 {
2571 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2572 bi->cg_edges.safe_push (cs);
2573 }
2574
2575 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2576
2577 int i;
2578 struct ipa_bb_info *bi;
2579 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2580 free_ipa_bb_info (bi);
2581 fbi.bb_infos.release ();
2582 free_dominance_info (CDI_DOMINATORS);
2583 pop_cfun ();
2584 }
2585
2586 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2587 attempt a type-based devirtualization. If successful, return the
2588 target function declaration, otherwise return NULL. */
2589
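/* For instance (illustrative):

     Derived d;
     Base *p = &d;
     p->method ();

   the dynamic type of *P is known within the function, so the
   OBJ_TYPE_REF can be resolved to Derived::method without any
   interprocedural propagation.  */
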
2590 tree
2591 ipa_intraprocedural_devirtualization (gimple call)
2592 {
2593 tree binfo, token, fndecl;
2594 struct ipa_jump_func jfunc;
2595 tree otr = gimple_call_fn (call);
2596
2597 jfunc.type = IPA_JF_UNKNOWN;
2598 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2599 call, obj_type_ref_class (otr));
2600 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2601 return NULL_TREE;
2602 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2603 if (!binfo)
2604 return NULL_TREE;
2605 token = OBJ_TYPE_REF_TOKEN (otr);
2606 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2607 binfo);
2608 #ifdef ENABLE_CHECKING
2609 if (fndecl)
2610 gcc_assert (possible_polymorphic_call_target_p
2611 (otr, call, cgraph_node::get (fndecl)));
2612 #endif
2613 return fndecl;
2614 }
2615
2616 /* Update the jump function DST when the call graph edge corresponding to SRC
2617 is being inlined, knowing that DST is of type ancestor and SRC of known
2618 type. */
2619
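/* For instance (illustrative offsets, in bits): if SRC says the argument
   is a known object of some type at offset 32 and DST describes walking
   to an ancestor 64 bits further in, the combination describes a known
   type at offset 96, provided DST preserves the type.  */
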
2620 static void
2621 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2622 struct ipa_jump_func *dst)
2623 {
2624 HOST_WIDE_INT combined_offset;
2625 tree combined_type;
2626
2627 if (!ipa_get_jf_ancestor_type_preserved (dst))
2628 {
2629 dst->type = IPA_JF_UNKNOWN;
2630 return;
2631 }
2632
2633 combined_offset = ipa_get_jf_known_type_offset (src)
2634 + ipa_get_jf_ancestor_offset (dst);
2635 combined_type = ipa_get_jf_ancestor_type (dst);
2636
2637 ipa_set_jf_known_type (dst, combined_offset,
2638 ipa_get_jf_known_type_base_type (src),
2639 combined_type);
2640 }
2641
2642 /* Update the jump functions associated with call graph edge E when the call
2643 graph edge CS is being inlined, assuming that E->caller is already (possibly
2644 indirectly) inlined into CS->callee and that E has not been inlined. */
2645
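/* For example (illustrative): if A calls B with the constant 7 and B
   merely forwards its first parameter to C, then after inlining B into
   A the pass-through jump function on the B->C edge is composed with
   the constant on the A->B edge, so the edge from the inlined body to C
   carries the constant 7 directly.  */
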
2646 static void
2647 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2648 struct cgraph_edge *e)
2649 {
2650 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2651 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2652 int count = ipa_get_cs_argument_count (args);
2653 int i;
2654
2655 for (i = 0; i < count; i++)
2656 {
2657 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2658
2659 if (dst->type == IPA_JF_ANCESTOR)
2660 {
2661 struct ipa_jump_func *src;
2662 int dst_fid = dst->value.ancestor.formal_id;
2663
2664 /* A variable number of arguments can cause havoc if we try to access
2665 an argument that does not exist on the inlined edge. So make sure we
2666 don't. */
2667 if (dst_fid >= ipa_get_cs_argument_count (top))
2668 {
2669 dst->type = IPA_JF_UNKNOWN;
2670 continue;
2671 }
2672
2673 src = ipa_get_ith_jump_func (top, dst_fid);
2674
2675 if (src->agg.items
2676 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2677 {
2678 struct ipa_agg_jf_item *item;
2679 int j;
2680
2681 /* Currently we do not produce clobber aggregate jump functions,
2682 replace with merging when we do. */
2683 gcc_assert (!dst->agg.items);
2684
2685 dst->agg.items = vec_safe_copy (src->agg.items);
2686 dst->agg.by_ref = src->agg.by_ref;
2687 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2688 item->offset -= dst->value.ancestor.offset;
2689 }
2690
2691 if (src->type == IPA_JF_KNOWN_TYPE)
2692 combine_known_type_and_ancestor_jfs (src, dst);
2693 else if (src->type == IPA_JF_PASS_THROUGH
2694 && src->value.pass_through.operation == NOP_EXPR)
2695 {
2696 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2697 dst->value.ancestor.agg_preserved &=
2698 src->value.pass_through.agg_preserved;
2699 dst->value.ancestor.type_preserved &=
2700 src->value.pass_through.type_preserved;
2701 }
2702 else if (src->type == IPA_JF_ANCESTOR)
2703 {
2704 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2705 dst->value.ancestor.offset += src->value.ancestor.offset;
2706 dst->value.ancestor.agg_preserved &=
2707 src->value.ancestor.agg_preserved;
2708 dst->value.ancestor.type_preserved &=
2709 src->value.ancestor.type_preserved;
2710 }
2711 else
2712 dst->type = IPA_JF_UNKNOWN;
2713 }
2714 else if (dst->type == IPA_JF_PASS_THROUGH)
2715 {
2716 struct ipa_jump_func *src;
2717 /* We must check the range due to calls with a variable number of arguments,
2718 and we cannot combine jump functions with operations. */
2719 if (dst->value.pass_through.operation == NOP_EXPR
2720 && (dst->value.pass_through.formal_id
2721 < ipa_get_cs_argument_count (top)))
2722 {
2723 int dst_fid = dst->value.pass_through.formal_id;
2724 src = ipa_get_ith_jump_func (top, dst_fid);
2725 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2726
2727 switch (src->type)
2728 {
2729 case IPA_JF_UNKNOWN:
2730 dst->type = IPA_JF_UNKNOWN;
2731 break;
2732 case IPA_JF_KNOWN_TYPE:
2733 if (ipa_get_jf_pass_through_type_preserved (dst))
2734 ipa_set_jf_known_type (dst,
2735 ipa_get_jf_known_type_offset (src),
2736 ipa_get_jf_known_type_base_type (src),
2737 ipa_get_jf_known_type_component_type (src));
2738 else
2739 dst->type = IPA_JF_UNKNOWN;
2740 break;
2741 case IPA_JF_CONST:
2742 ipa_set_jf_cst_copy (dst, src);
2743 break;
2744
2745 case IPA_JF_PASS_THROUGH:
2746 {
2747 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2748 enum tree_code operation;
2749 operation = ipa_get_jf_pass_through_operation (src);
2750
2751 if (operation == NOP_EXPR)
2752 {
2753 bool agg_p, type_p;
2754 agg_p = dst_agg_p
2755 && ipa_get_jf_pass_through_agg_preserved (src);
2756 type_p = ipa_get_jf_pass_through_type_preserved (src)
2757 && ipa_get_jf_pass_through_type_preserved (dst);
2758 ipa_set_jf_simple_pass_through (dst, formal_id,
2759 agg_p, type_p);
2760 }
2761 else
2762 {
2763 tree operand = ipa_get_jf_pass_through_operand (src);
2764 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2765 operation);
2766 }
2767 break;
2768 }
2769 case IPA_JF_ANCESTOR:
2770 {
2771 bool agg_p, type_p;
2772 agg_p = dst_agg_p
2773 && ipa_get_jf_ancestor_agg_preserved (src);
2774 type_p = ipa_get_jf_ancestor_type_preserved (src)
2775 && ipa_get_jf_pass_through_type_preserved (dst);
2776 ipa_set_ancestor_jf (dst,
2777 ipa_get_jf_ancestor_offset (src),
2778 ipa_get_jf_ancestor_type (src),
2779 ipa_get_jf_ancestor_formal_id (src),
2780 agg_p, type_p);
2781 break;
2782 }
2783 default:
2784 gcc_unreachable ();
2785 }
2786
2787 if (src->agg.items
2788 && (dst_agg_p || !src->agg.by_ref))
2789 {
2790 /* Currently we do not produce clobber aggregate jump
2791 functions, replace with merging when we do. */
2792 gcc_assert (!dst->agg.items);
2793
2794 dst->agg.by_ref = src->agg.by_ref;
2795 dst->agg.items = vec_safe_copy (src->agg.items);
2796 }
2797 }
2798 else
2799 dst->type = IPA_JF_UNKNOWN;
2800 }
2801 }
2802 }
2803
2804 /* If TARGET is an addr_expr of a function declaration, make it the destination
2805 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2806
2807 struct cgraph_edge *
2808 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2809 {
2810 struct cgraph_node *callee;
2811 struct inline_edge_summary *es = inline_edge_summary (ie);
2812 bool unreachable = false;
2813
2814 if (TREE_CODE (target) == ADDR_EXPR)
2815 target = TREE_OPERAND (target, 0);
2816 if (TREE_CODE (target) != FUNCTION_DECL)
2817 {
2818 target = canonicalize_constructor_val (target, NULL);
2819 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2820 {
2821 if (ie->indirect_info->member_ptr)
2822 /* Member pointer call that goes through a VMT lookup. */
2823 return NULL;
2824
2825 if (dump_enabled_p ())
2826 {
2827 location_t loc = gimple_location_safe (ie->call_stmt);
2828 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2829 "discovered direct call to non-function in %s/%i, "
2830 "making it __builtin_unreachable\n",
2831 ie->caller->name (), ie->caller->order);
2832 }
2833
2834 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2835 callee = cgraph_node::get_create (target);
2836 unreachable = true;
2837 }
2838 else
2839 callee = cgraph_node::get (target);
2840 }
2841 else
2842 callee = cgraph_node::get (target);
2843
2844 /* Because may-edges are not explicitly represented and the vtable may be external,
2845 we may create the first reference to the object in the unit. */
2846 if (!callee || callee->global.inlined_to)
2847 {
2848
2849 /* We had better ensure we can refer to it.
2850 In the case of static functions we are out of luck, since we have already
2851 removed their bodies. In the case of public functions we may or may
2852 not introduce the reference. */
2853 if (!canonicalize_constructor_val (target, NULL)
2854 || !TREE_PUBLIC (target))
2855 {
2856 if (dump_file)
2857 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2858 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2859 xstrdup (ie->caller->name ()),
2860 ie->caller->order,
2861 xstrdup (ie->callee->name ()),
2862 ie->callee->order);
2863 return NULL;
2864 }
2865 callee = cgraph_node::get_create (target);
2866 }
2867
2868 if (!dbg_cnt (devirt))
2869 return NULL;
2870
2871 ipa_check_create_node_params ();
2872
2873 /* We cannot make edges to inline clones. It is a bug if someone removed
2874 the cgraph node too early. */
2875 gcc_assert (!callee->global.inlined_to);
2876
2877 if (dump_file && !unreachable)
2878 {
2879 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2880 "(%s/%i -> %s/%i), for stmt ",
2881 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2882 xstrdup (ie->caller->name ()),
2883 ie->caller->order,
2884 xstrdup (callee->name ()),
2885 callee->order);
2886 if (ie->call_stmt)
2887 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2888 else
2889 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2890 }
2891 if (dump_enabled_p ())
2892 {
2893 location_t loc = gimple_location_safe (ie->call_stmt);
2894
2895 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2896 "converting indirect call in %s to direct call to %s\n",
2897 ie->caller->name (), callee->name ());
2898 }
2899 ie = ie->make_direct (callee);
2900 es = inline_edge_summary (ie);
2901 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2902 - eni_size_weights.call_cost);
2903 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2904 - eni_time_weights.call_cost);
2905
2906 return ie;
2907 }
2908
2909 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2910 return NULL if there is none. BY_REF specifies whether the value has to
2911 be passed by reference or by value. */
2912
2913 tree
2914 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2915 HOST_WIDE_INT offset, bool by_ref)
2916 {
2917 struct ipa_agg_jf_item *item;
2918 int i;
2919
2920 if (by_ref != agg->by_ref)
2921 return NULL;
2922
2923 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2924 if (item->offset == offset)
2925 {
2926 /* Currently we do not have clobber values, return NULL for them once
2927 we do. */
2928 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2929 return item->value;
2930 }
2931 return NULL;
2932 }
2933
2934 /* Remove a reference to SYMBOL from the list of references of a node given by
2935 reference description RDESC. Return true if the reference has been
2936 successfully found and removed. */
2937
2938 static bool
2939 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2940 {
2941 struct ipa_ref *to_del;
2942 struct cgraph_edge *origin;
2943
2944 origin = rdesc->cs;
2945 if (!origin)
2946 return false;
2947 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2948 origin->lto_stmt_uid);
2949 if (!to_del)
2950 return false;
2951
2952 to_del->remove_reference ();
2953 if (dump_file)
2954 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2955 xstrdup (origin->caller->name ()),
2956 origin->caller->order, xstrdup (symbol->name ()));
2957 return true;
2958 }
2959
2960 /* If JFUNC has a reference description with refcount different from
2961 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2962 NULL. JFUNC must be a constant jump function. */
2963
2964 static struct ipa_cst_ref_desc *
2965 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2966 {
2967 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2968 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2969 return rdesc;
2970 else
2971 return NULL;
2972 }
2973
2974 /* If the value of constant jump function JFUNC is an address of a function
2975 declaration, return the associated call graph node. Otherwise return
2976 NULL. */
2977
2978 static cgraph_node *
2979 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2980 {
2981 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2982 tree cst = ipa_get_jf_constant (jfunc);
2983 if (TREE_CODE (cst) != ADDR_EXPR
2984 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2985 return NULL;
2986
2987 return cgraph_node::get (TREE_OPERAND (cst, 0));
2988 }
2989
2990
2991 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2992 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2993 the edge specified in the rdesc. Return false if either the symbol or the
2994 reference could not be found, otherwise return true. */
2995
2996 static bool
2997 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2998 {
2999 struct ipa_cst_ref_desc *rdesc;
3000 if (jfunc->type == IPA_JF_CONST
3001 && (rdesc = jfunc_rdesc_usable (jfunc))
3002 && --rdesc->refcount == 0)
3003 {
3004 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3005 if (!symbol)
3006 return false;
3007
3008 return remove_described_reference (symbol, rdesc);
3009 }
3010 return true;
3011 }
3012
3013 /* Try to find a destination for indirect edge IE that corresponds to a simple
3014 call or a call of a member function pointer and where the destination is a
3015 pointer formal parameter described by jump function JFUNC. If it can be
3016 determined, return the newly direct edge, otherwise return NULL.
3017 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3018
3019 static struct cgraph_edge *
3020 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3021 struct ipa_jump_func *jfunc,
3022 struct ipa_node_params *new_root_info)
3023 {
3024 struct cgraph_edge *cs;
3025 tree target;
3026 bool agg_contents = ie->indirect_info->agg_contents;
3027
3028 if (ie->indirect_info->agg_contents)
3029 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3030 ie->indirect_info->offset,
3031 ie->indirect_info->by_ref);
3032 else
3033 target = ipa_value_from_jfunc (new_root_info, jfunc);
3034 if (!target)
3035 return NULL;
3036 cs = ipa_make_edge_direct_to_target (ie, target);
3037
3038 if (cs && !agg_contents)
3039 {
3040 bool ok;
3041 gcc_checking_assert (cs->callee
3042 && (cs != ie
3043 || jfunc->type != IPA_JF_CONST
3044 || !cgraph_node_for_jfunc (jfunc)
3045 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3046 ok = try_decrement_rdesc_refcount (jfunc);
3047 gcc_checking_assert (ok);
3048 }
3049
3050 return cs;
3051 }
3052
3053 /* Return the target to be used in cases of impossible devirtualization. IE
3054 and target (the latter can be NULL) are dumped when dumping is enabled. */
3055
3056 tree
3057 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3058 {
3059 if (dump_file)
3060 {
3061 if (target)
3062 fprintf (dump_file,
3063 "Type inconsistent devirtualization: %s/%i->%s\n",
3064 ie->caller->name (), ie->caller->order,
3065 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3066 else
3067 fprintf (dump_file,
3068 "No devirtualization target in %s/%i\n",
3069 ie->caller->name (), ie->caller->order);
3070 }
3071 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3072 cgraph_node::get_create (new_target);
3073 return new_target;
3074 }
3075
3076 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3077 call based on a formal parameter which is described by jump function JFUNC
3078 and if it can be determined, make it direct and return the direct edge.
3079 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3080 are relative to. */
3081
3082 static struct cgraph_edge *
3083 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3084 struct ipa_jump_func *jfunc,
3085 struct ipa_node_params *new_root_info)
3086 {
3087 tree binfo, target;
3088
3089 if (!flag_devirtualize)
3090 return NULL;
3091
3092 /* First try to do lookup via known virtual table pointer value. */
3093 if (!ie->indirect_info->by_ref)
3094 {
3095 tree vtable;
3096 unsigned HOST_WIDE_INT offset;
3097 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3098 ie->indirect_info->offset,
3099 true);
3100 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3101 {
3102 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3103 vtable, offset);
3104 if (target)
3105 {
3106 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
3107 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
3108 || !possible_polymorphic_call_target_p
3109 (ie, cgraph_node::get (target)))
3110 target = ipa_impossible_devirt_target (ie, target);
3111 return ipa_make_edge_direct_to_target (ie, target);
3112 }
3113 }
3114 }
3115
3116 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3117
3118 if (!binfo)
3119 return NULL;
3120
3121 if (TREE_CODE (binfo) != TREE_BINFO)
3122 {
3123 ipa_polymorphic_call_context context (binfo,
3124 ie->indirect_info->otr_type,
3125 ie->indirect_info->offset);
3126 vec <cgraph_node *>targets;
3127 bool final;
3128
3129 targets = possible_polymorphic_call_targets
3130 (ie->indirect_info->otr_type,
3131 ie->indirect_info->otr_token,
3132 context, &final);
3133 if (!final || targets.length () > 1)
3134 return NULL;
3135 if (targets.length () == 1)
3136 target = targets[0]->decl;
3137 else
3138 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3139 }
3140 else
3141 {
3142 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3143 ie->indirect_info->otr_type);
3144 if (binfo)
3145 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3146 binfo);
3147 else
3148 return NULL;
3149 }
3150
3151 if (target)
3152 {
3153 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get (target)))
3154 target = ipa_impossible_devirt_target (ie, target);
3155 return ipa_make_edge_direct_to_target (ie, target);
3156 }
3157 else
3158 return NULL;
3159 }
3160
3161 /* Update the param called notes associated with NODE when CS is being inlined,
3162 assuming NODE is (potentially indirectly) inlined into CS->callee.
3163 Moreover, if the callee is discovered to be constant, create a new cgraph
3164 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3165 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3166
3167 static bool
3168 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3169 struct cgraph_node *node,
3170 vec<cgraph_edge *> *new_edges)
3171 {
3172 struct ipa_edge_args *top;
3173 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3174 struct ipa_node_params *new_root_info;
3175 bool res = false;
3176
3177 ipa_check_create_edge_args ();
3178 top = IPA_EDGE_REF (cs);
3179 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3180 ? cs->caller->global.inlined_to
3181 : cs->caller);
3182
3183 for (ie = node->indirect_calls; ie; ie = next_ie)
3184 {
3185 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3186 struct ipa_jump_func *jfunc;
3187 int param_index;
3188
3189 next_ie = ie->next_callee;
3190
3191 if (ici->param_index == -1)
3192 continue;
3193
3194 /* We must check the range due to calls with a variable number of arguments: */
3195 if (ici->param_index >= ipa_get_cs_argument_count (top))
3196 {
3197 ici->param_index = -1;
3198 continue;
3199 }
3200
3201 param_index = ici->param_index;
3202 jfunc = ipa_get_ith_jump_func (top, param_index);
3203
3204 if (!flag_indirect_inlining)
3205 new_direct_edge = NULL;
3206 else if (ici->polymorphic)
3207 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3208 new_root_info);
3209 else
3210 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3211 new_root_info);
3212 /* If speculation was removed, then we need to do nothing. */
3213 if (new_direct_edge && new_direct_edge != ie)
3214 {
3215 new_direct_edge->indirect_inlining_edge = 1;
3216 top = IPA_EDGE_REF (cs);
3217 res = true;
3218 }
3219 else if (new_direct_edge)
3220 {
3221 new_direct_edge->indirect_inlining_edge = 1;
3222 if (new_direct_edge->call_stmt)
3223 new_direct_edge->call_stmt_cannot_inline_p
3224 = !gimple_check_call_matching_types (
3225 new_direct_edge->call_stmt,
3226 new_direct_edge->callee->decl, false);
3227 if (new_edges)
3228 {
3229 new_edges->safe_push (new_direct_edge);
3230 res = true;
3231 }
3232 top = IPA_EDGE_REF (cs);
3233 }
3234 else if (jfunc->type == IPA_JF_PASS_THROUGH
3235 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3236 {
3237 if ((ici->agg_contents
3238 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3239 || (ici->polymorphic
3240 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3241 ici->param_index = -1;
3242 else
3243 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3244 }
3245 else if (jfunc->type == IPA_JF_ANCESTOR)
3246 {
3247 if ((ici->agg_contents
3248 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3249 || (ici->polymorphic
3250 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3251 ici->param_index = -1;
3252 else
3253 {
3254 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3255 if (ipa_get_jf_ancestor_offset (jfunc))
3256 ici->outer_type = NULL;
3257 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3258 }
3259 }
3260 else
3261 /* Either we can find a destination for this edge now or never. */
3262 ici->param_index = -1;
3263 }
3264
3265 return res;
3266 }
3267
3268 /* Recursively traverse subtree of NODE (including node) made of inlined
3269 cgraph_edges when CS has been inlined and invoke
3270 update_indirect_edges_after_inlining on all nodes and
3271 update_jump_functions_after_inlining on all non-inlined edges that lead out
3272 of this subtree. Newly discovered indirect edges will be added to
3273 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3274 created. */
3275
3276 static bool
3277 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3278 struct cgraph_node *node,
3279 vec<cgraph_edge *> *new_edges)
3280 {
3281 struct cgraph_edge *e;
3282 bool res;
3283
3284 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3285
3286 for (e = node->callees; e; e = e->next_callee)
3287 if (!e->inline_failed)
3288 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3289 else
3290 update_jump_functions_after_inlining (cs, e);
3291 for (e = node->indirect_calls; e; e = e->next_callee)
3292 update_jump_functions_after_inlining (cs, e);
3293
3294 return res;
3295 }
3296
3297 /* Combine two controlled uses counts as done during inlining. */
3298
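/* For instance, if the caller's parameter has c == 2 controlled uses,
   one of which is the inlined call site itself, and the corresponding
   argument of the callee has d == 3 controlled uses, then after inlining
   the call-site use disappears and the callee's uses become uses of the
   caller's parameter, giving 2 + 3 - 1 == 4.  IPA_UNDESCRIBED_USE in
   either count makes the result IPA_UNDESCRIBED_USE.  */
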
3299 static int
3300 combine_controlled_uses_counters (int c, int d)
3301 {
3302 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3303 return IPA_UNDESCRIBED_USE;
3304 else
3305 return c + d - 1;
3306 }
3307
3308 /* Propagate the number of controlled users from CS->callee to the new root of the
3309 tree of inlined nodes. */
3310
3311 static void
3312 propagate_controlled_uses (struct cgraph_edge *cs)
3313 {
3314 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3315 struct cgraph_node *new_root = cs->caller->global.inlined_to
3316 ? cs->caller->global.inlined_to : cs->caller;
3317 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3318 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3319 int count, i;
3320
3321 count = MIN (ipa_get_cs_argument_count (args),
3322 ipa_get_param_count (old_root_info));
3323 for (i = 0; i < count; i++)
3324 {
3325 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3326 struct ipa_cst_ref_desc *rdesc;
3327
3328 if (jf->type == IPA_JF_PASS_THROUGH)
3329 {
3330 int src_idx, c, d;
3331 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3332 c = ipa_get_controlled_uses (new_root_info, src_idx);
3333 d = ipa_get_controlled_uses (old_root_info, i);
3334
3335 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3336 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3337 c = combine_controlled_uses_counters (c, d);
3338 ipa_set_controlled_uses (new_root_info, src_idx, c);
3339 if (c == 0 && new_root_info->ipcp_orig_node)
3340 {
3341 struct cgraph_node *n;
3342 struct ipa_ref *ref;
3343 tree t = new_root_info->known_vals[src_idx];
3344
3345 if (t && TREE_CODE (t) == ADDR_EXPR
3346 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3347 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3348 && (ref = new_root->find_reference (n, NULL, 0)))
3349 {
3350 if (dump_file)
3351 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3352 "reference from %s/%i to %s/%i.\n",
3353 xstrdup (new_root->name ()),
3354 new_root->order,
3355 xstrdup (n->name ()), n->order);
3356 ref->remove_reference ();
3357 }
3358 }
3359 }
3360 else if (jf->type == IPA_JF_CONST
3361 && (rdesc = jfunc_rdesc_usable (jf)))
3362 {
3363 int d = ipa_get_controlled_uses (old_root_info, i);
3364 int c = rdesc->refcount;
3365 rdesc->refcount = combine_controlled_uses_counters (c, d);
3366 if (rdesc->refcount == 0)
3367 {
3368 tree cst = ipa_get_jf_constant (jf);
3369 struct cgraph_node *n;
3370 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3371 && TREE_CODE (TREE_OPERAND (cst, 0))
3372 == FUNCTION_DECL);
3373 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3374 if (n)
3375 {
3376 struct cgraph_node *clone;
3377 bool ok;
3378 ok = remove_described_reference (n, rdesc);
3379 gcc_checking_assert (ok);
3380
3381 clone = cs->caller;
3382 while (clone->global.inlined_to
3383 && clone != rdesc->cs->caller
3384 && IPA_NODE_REF (clone)->ipcp_orig_node)
3385 {
3386 struct ipa_ref *ref;
3387 ref = clone->find_reference (n, NULL, 0);
3388 if (ref)
3389 {
3390 if (dump_file)
3391 fprintf (dump_file, "ipa-prop: Removing "
3392 "cloning-created reference "
3393 "from %s/%i to %s/%i.\n",
3394 xstrdup (clone->name ()),
3395 clone->order,
3396 xstrdup (n->name ()),
3397 n->order);
3398 ref->remove_reference ();
3399 }
3400 clone = clone->callers->caller;
3401 }
3402 }
3403 }
3404 }
3405 }
3406
3407 for (i = ipa_get_param_count (old_root_info);
3408 i < ipa_get_cs_argument_count (args);
3409 i++)
3410 {
3411 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3412
3413 if (jf->type == IPA_JF_CONST)
3414 {
3415 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3416 if (rdesc)
3417 rdesc->refcount = IPA_UNDESCRIBED_USE;
3418 }
3419 else if (jf->type == IPA_JF_PASS_THROUGH)
3420 ipa_set_controlled_uses (new_root_info,
3421 jf->value.pass_through.formal_id,
3422 IPA_UNDESCRIBED_USE);
3423 }
3424 }
3425
3426 /* Update jump functions and call note functions on inlining the call site CS.
3427 CS is expected to lead to a node already cloned by
3428 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3429 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3430 created. */
3431
3432 bool
3433 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3434 vec<cgraph_edge *> *new_edges)
3435 {
3436 bool changed;
3437 /* Do nothing if the preparation phase has not been carried out yet
3438 (i.e. during early inlining). */
3439 if (!ipa_node_params_vector.exists ())
3440 return false;
3441 gcc_assert (ipa_edge_args_vector);
3442
3443 propagate_controlled_uses (cs);
3444 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3445
3446 return changed;
3447 }
3448
3449 /* Frees all dynamically allocated structures that the argument info points
3450 to. */
3451
3452 void
3453 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3454 {
3455 vec_free (args->jump_functions);
3456 memset (args, 0, sizeof (*args));
3457 }
3458
3459 /* Free all ipa_edge_args structures. */
3460
3461 void
3462 ipa_free_all_edge_args (void)
3463 {
3464 int i;
3465 struct ipa_edge_args *args;
3466
3467 if (!ipa_edge_args_vector)
3468 return;
3469
3470 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3471 ipa_free_edge_args_substructures (args);
3472
3473 vec_free (ipa_edge_args_vector);
3474 }
3475
3476 /* Frees all dynamically allocated structures that the param info points
3477 to. */
3478
3479 void
3480 ipa_free_node_params_substructures (struct ipa_node_params *info)
3481 {
3482 info->descriptors.release ();
3483 free (info->lattices);
3484 /* Lattice values and their sources are deallocated with their allocation
3485 pool. */
3486 info->known_vals.release ();
3487 memset (info, 0, sizeof (*info));
3488 }
3489
3490 /* Free all ipa_node_params structures. */
3491
3492 void
3493 ipa_free_all_node_params (void)
3494 {
3495 int i;
3496 struct ipa_node_params *info;
3497
3498 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3499 ipa_free_node_params_substructures (info);
3500
3501 ipa_node_params_vector.release ();
3502 }
3503
3504 /* Set the aggregate replacements of NODE to be AGGVALS. */
3505
3506 void
3507 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3508 struct ipa_agg_replacement_value *aggvals)
3509 {
3510 if (vec_safe_length (ipa_node_agg_replacements)
3511 <= (unsigned) symtab->cgraph_max_uid)
3512 vec_safe_grow_cleared (ipa_node_agg_replacements,
3513 symtab->cgraph_max_uid + 1);
3514
3515 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3516 }
3517
3518 /* Hook that is called by cgraph.c when an edge is removed. */
3519
3520 static void
3521 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3522 {
3523 struct ipa_edge_args *args;
3524
3525 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3526 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3527 return;
3528
3529 args = IPA_EDGE_REF (cs);
3530 if (args->jump_functions)
3531 {
3532 struct ipa_jump_func *jf;
3533 int i;
3534 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3535 {
3536 struct ipa_cst_ref_desc *rdesc;
3537 try_decrement_rdesc_refcount (jf);
3538 if (jf->type == IPA_JF_CONST
3539 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3540 && rdesc->cs == cs)
3541 rdesc->cs = NULL;
3542 }
3543 }
3544
3545 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3546 }
3547
3548 /* Hook that is called by cgraph.c when a node is removed. */
3549
3550 static void
3551 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3552 {
3553 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3554 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3555 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3556 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3557 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3558 }
3559
3560 /* Hook that is called by cgraph.c when an edge is duplicated. */
3561
3562 static void
3563 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3564 __attribute__((unused)) void *data)
3565 {
3566 struct ipa_edge_args *old_args, *new_args;
3567 unsigned int i;
3568
3569 ipa_check_create_edge_args ();
3570
3571 old_args = IPA_EDGE_REF (src);
3572 new_args = IPA_EDGE_REF (dst);
3573
3574 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3575
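  /* The shallow copy above left DST's jump functions sharing their agg.items
     vectors and rdesc pointers with SRC's; the loop below gives each
     duplicated jump function its own copies and sets up reference
     descriptors appropriate for the new edge.  */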
3576 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3577 {
3578 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3579 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3580
3581 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3582
3583 if (src_jf->type == IPA_JF_CONST)
3584 {
3585 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3586
3587 if (!src_rdesc)
3588 dst_jf->value.constant.rdesc = NULL;
3589 else if (src->caller == dst->caller)
3590 {
3591 struct ipa_ref *ref;
3592 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3593 gcc_checking_assert (n);
3594 ref = src->caller->find_reference (n, src->call_stmt,
3595 src->lto_stmt_uid);
3596 gcc_checking_assert (ref);
3597 dst->caller->clone_reference (ref, ref->stmt);
3598
3599 gcc_checking_assert (ipa_refdesc_pool);
3600 struct ipa_cst_ref_desc *dst_rdesc
3601 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3602 dst_rdesc->cs = dst;
3603 dst_rdesc->refcount = src_rdesc->refcount;
3604 dst_rdesc->next_duplicate = NULL;
3605 dst_jf->value.constant.rdesc = dst_rdesc;
3606 }
3607 else if (src_rdesc->cs == src)
3608 {
3609 struct ipa_cst_ref_desc *dst_rdesc;
3610 gcc_checking_assert (ipa_refdesc_pool);
3611 dst_rdesc
3612 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3613 dst_rdesc->cs = dst;
3614 dst_rdesc->refcount = src_rdesc->refcount;
3615 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3616 src_rdesc->next_duplicate = dst_rdesc;
3617 dst_jf->value.constant.rdesc = dst_rdesc;
3618 }
3619 else
3620 {
3621 struct ipa_cst_ref_desc *dst_rdesc;
3622 /* This can happen during inlining, when a JFUNC can refer to a
3623 reference taken in a function up in the tree of inline clones.
3624 We need to find the duplicate that refers to our tree of
3625 inline clones. */
3626
3627 gcc_assert (dst->caller->global.inlined_to);
3628 for (dst_rdesc = src_rdesc->next_duplicate;
3629 dst_rdesc;
3630 dst_rdesc = dst_rdesc->next_duplicate)
3631 {
3632 struct cgraph_node *top;
3633 top = dst_rdesc->cs->caller->global.inlined_to
3634 ? dst_rdesc->cs->caller->global.inlined_to
3635 : dst_rdesc->cs->caller;
3636 if (dst->caller->global.inlined_to == top)
3637 break;
3638 }
3639 gcc_assert (dst_rdesc);
3640 dst_jf->value.constant.rdesc = dst_rdesc;
3641 }
3642 }
3643 }
3644 }
3645
3646 /* Hook that is called by cgraph.c when a node is duplicated. */
3647
3648 static void
3649 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3650 ATTRIBUTE_UNUSED void *data)
3651 {
3652 struct ipa_node_params *old_info, *new_info;
3653 struct ipa_agg_replacement_value *old_av, *new_av;
3654
3655 ipa_check_create_node_params ();
3656 old_info = IPA_NODE_REF (src);
3657 new_info = IPA_NODE_REF (dst);
3658
3659 new_info->descriptors = old_info->descriptors.copy ();
3660 new_info->lattices = NULL;
3661 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3662
3663 new_info->analysis_done = old_info->analysis_done;
3664 new_info->node_enqueued = old_info->node_enqueued;
3665
3666 old_av = ipa_get_agg_replacements_for_node (src);
3667 if (!old_av)
3668 return;
3669
3670 new_av = NULL;
3671 while (old_av)
3672 {
3673 struct ipa_agg_replacement_value *v;
3674
3675 v = ggc_alloc<ipa_agg_replacement_value> ();
3676 memcpy (v, old_av, sizeof (*v));
3677 v->next = new_av;
3678 new_av = v;
3679 old_av = old_av->next;
3680 }
3681 ipa_set_node_agg_value_chain (dst, new_av);
3682 }
3683
3684
3685 /* Analyze a function that has been newly added to the callgraph. */
3686
3687 static void
3688 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3689 {
3690 if (node->has_gimple_body_p ())
3691 ipa_analyze_node (node);
3692 }
3693
3694 /* Register our cgraph hooks if they are not already there. */
3695
3696 void
3697 ipa_register_cgraph_hooks (void)
3698 {
3699 if (!edge_removal_hook_holder)
3700 edge_removal_hook_holder =
3701 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3702 if (!node_removal_hook_holder)
3703 node_removal_hook_holder =
3704 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3705 if (!edge_duplication_hook_holder)
3706 edge_duplication_hook_holder =
3707 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3708 if (!node_duplication_hook_holder)
3709 node_duplication_hook_holder =
3710 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3711 function_insertion_hook_holder =
3712 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3713 }
3714
3715 /* Unregister our cgraph hooks. */
3716
3717 static void
3718 ipa_unregister_cgraph_hooks (void)
3719 {
3720 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3721 edge_removal_hook_holder = NULL;
3722 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3723 node_removal_hook_holder = NULL;
3724 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3725 edge_duplication_hook_holder = NULL;
3726 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3727 node_duplication_hook_holder = NULL;
3728 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3729 function_insertion_hook_holder = NULL;
3730 }
3731
3732 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3733 longer needed after ipa-cp. */
3734
3735 void
3736 ipa_free_all_structures_after_ipa_cp (void)
3737 {
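  /* When optimizing, the jump functions and parameter information are still
     needed for indirect inlining; in that case they are freed later by
     ipa_free_all_structures_after_iinln.  */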
3738 if (!optimize)
3739 {
3740 ipa_free_all_edge_args ();
3741 ipa_free_all_node_params ();
3742 free_alloc_pool (ipcp_sources_pool);
3743 free_alloc_pool (ipcp_values_pool);
3744 free_alloc_pool (ipcp_agg_lattice_pool);
3745 ipa_unregister_cgraph_hooks ();
3746 if (ipa_refdesc_pool)
3747 free_alloc_pool (ipa_refdesc_pool);
3748 }
3749 }
3750
3751 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3752 longer needed after indirect inlining. */
3753
3754 void
3755 ipa_free_all_structures_after_iinln (void)
3756 {
3757 ipa_free_all_edge_args ();
3758 ipa_free_all_node_params ();
3759 ipa_unregister_cgraph_hooks ();
3760 if (ipcp_sources_pool)
3761 free_alloc_pool (ipcp_sources_pool);
3762 if (ipcp_values_pool)
3763 free_alloc_pool (ipcp_values_pool);
3764 if (ipcp_agg_lattice_pool)
3765 free_alloc_pool (ipcp_agg_lattice_pool);
3766 if (ipa_refdesc_pool)
3767 free_alloc_pool (ipa_refdesc_pool);
3768 }
3769
3770 /* Print the parameter descriptors recorded for function NODE to
3771 file F. */
3772
3773 void
3774 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3775 {
3776 int i, count;
3777 struct ipa_node_params *info;
3778
3779 if (!node->definition)
3780 return;
3781 info = IPA_NODE_REF (node);
3782 fprintf (f, " function %s/%i parameter descriptors:\n",
3783 node->name (), node->order);
3784 count = ipa_get_param_count (info);
3785 for (i = 0; i < count; i++)
3786 {
3787 int c;
3788
3789 fprintf (f, " ");
3790 ipa_dump_param (f, info, i);
3791 if (ipa_is_param_used (info, i))
3792 fprintf (f, " used");
3793 c = ipa_get_controlled_uses (info, i);
3794 if (c == IPA_UNDESCRIBED_USE)
3795 fprintf (f, " undescribed_use");
3796 else
3797 fprintf (f, " controlled_uses=%i", c);
3798 fprintf (f, "\n");
3799 }
3800 }
3801
3802 /* Print the parameter descriptors of all functions in the
3803 callgraph to F. */
3804
3805 void
3806 ipa_print_all_params (FILE * f)
3807 {
3808 struct cgraph_node *node;
3809
3810 fprintf (f, "\nFunction parameters:\n");
3811 FOR_EACH_FUNCTION (node)
3812 ipa_print_node_params (f, node);
3813 }
3814
3815 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3816
3817 vec<tree>
3818 ipa_get_vector_of_formal_parms (tree fndecl)
3819 {
3820 vec<tree> args;
3821 int count;
3822 tree parm;
3823
3824 gcc_assert (!flag_wpa);
3825 count = count_formal_params (fndecl);
3826 args.create (count);
3827 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3828 args.quick_push (parm);
3829
3830 return args;
3831 }
3832
3833 /* Return a heap allocated vector containing types of formal parameters of
3834 function type FNTYPE. */
3835
3836 vec<tree>
3837 ipa_get_vector_of_formal_parm_types (tree fntype)
3838 {
3839 vec<tree> types;
3840 int count = 0;
3841 tree t;
3842
3843 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3844 count++;
3845
3846 types.create (count);
3847 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3848 types.quick_push (TREE_VALUE (t));
3849
3850 return types;
3851 }
3852
3853 /* Modify the function declaration FNDECL and its type according to the plan in
3854 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3855 to reflect the actual parameters being modified which are determined by the
3856 base_index field. */
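/* For instance (an illustrative sketch only, not a vector any particular
   caller is known to build): applying
     { {op=IPA_PARM_OP_COPY, base_index=0},
       {op=IPA_PARM_OP_REMOVE, base_index=1} }
   to 'int f (int a, struct s *p)' turns its declaration into
   'int f (int a)'.  */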
3857
3858 void
3859 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3860 {
3861 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3862 tree orig_type = TREE_TYPE (fndecl);
3863 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3864
3865 /* The following test is an ugly hack; some functions simply don't have any
3866 arguments in their type. This is probably a bug but well... */
3867 bool care_for_types = (old_arg_types != NULL_TREE);
3868 bool last_parm_void;
3869 vec<tree> otypes;
3870 if (care_for_types)
3871 {
3872 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3873 == void_type_node);
3874 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3875 if (last_parm_void)
3876 gcc_assert (oparms.length () + 1 == otypes.length ());
3877 else
3878 gcc_assert (oparms.length () == otypes.length ());
3879 }
3880 else
3881 {
3882 last_parm_void = false;
3883 otypes.create (0);
3884 }
3885
3886 int len = adjustments.length ();
3887 tree *link = &DECL_ARGUMENTS (fndecl);
3888 tree new_arg_types = NULL;
3889 for (int i = 0; i < len; i++)
3890 {
3891 struct ipa_parm_adjustment *adj;
3892 gcc_assert (link);
3893
3894 adj = &adjustments[i];
3895 tree parm;
3896 if (adj->op == IPA_PARM_OP_NEW)
3897 parm = NULL;
3898 else
3899 parm = oparms[adj->base_index];
3900 adj->base = parm;
3901
3902 if (adj->op == IPA_PARM_OP_COPY)
3903 {
3904 if (care_for_types)
3905 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3906 new_arg_types);
3907 *link = parm;
3908 link = &DECL_CHAIN (parm);
3909 }
3910 else if (adj->op != IPA_PARM_OP_REMOVE)
3911 {
3912 tree new_parm;
3913 tree ptype;
3914
3915 if (adj->by_ref)
3916 ptype = build_pointer_type (adj->type);
3917 else
3918 {
3919 ptype = adj->type;
3920 if (is_gimple_reg_type (ptype))
3921 {
3922 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3923 if (TYPE_ALIGN (ptype) < malign)
3924 ptype = build_aligned_type (ptype, malign);
3925 }
3926 }
3927
3928 if (care_for_types)
3929 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3930
3931 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3932 ptype);
3933 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3934 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3935 DECL_ARTIFICIAL (new_parm) = 1;
3936 DECL_ARG_TYPE (new_parm) = ptype;
3937 DECL_CONTEXT (new_parm) = fndecl;
3938 TREE_USED (new_parm) = 1;
3939 DECL_IGNORED_P (new_parm) = 1;
3940 layout_decl (new_parm, 0);
3941
3942 if (adj->op == IPA_PARM_OP_NEW)
3943 adj->base = NULL;
3944 else
3945 adj->base = parm;
3946 adj->new_decl = new_parm;
3947
3948 *link = new_parm;
3949 link = &DECL_CHAIN (new_parm);
3950 }
3951 }
3952
3953 *link = NULL_TREE;
3954
3955 tree new_reversed = NULL;
3956 if (care_for_types)
3957 {
3958 new_reversed = nreverse (new_arg_types);
3959 if (last_parm_void)
3960 {
3961 if (new_reversed)
3962 TREE_CHAIN (new_arg_types) = void_list_node;
3963 else
3964 new_reversed = void_list_node;
3965 }
3966 }
3967
3968 /* Use build_distinct_type_copy to preserve as much as possible from the
3969 original type (debug info, attribute lists etc.).
3970 The exception is that a METHOD_TYPE must have a THIS argument; when we
3971 are asked to remove it, we need to build a new FUNCTION_TYPE
3972 instead. */
3973 tree new_type = NULL;
3974 if (TREE_CODE (orig_type) != METHOD_TYPE
3975 || (adjustments[0].op == IPA_PARM_OP_COPY
3976 && adjustments[0].base_index == 0))
3977 {
3978 new_type = build_distinct_type_copy (orig_type);
3979 TYPE_ARG_TYPES (new_type) = new_reversed;
3980 }
3981 else
3982 {
3983 new_type
3984 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3985 new_reversed));
3986 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3987 DECL_VINDEX (fndecl) = NULL_TREE;
3988 }
3989
3990 /* When signature changes, we need to clear builtin info. */
3991 if (DECL_BUILT_IN (fndecl))
3992 {
3993 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3994 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3995 }
3996
3997 /* This is a new type, not a copy of an old type. Need to reassociate
3998 variants. We can handle everything except the main variant lazily. */
3999 tree t = TYPE_MAIN_VARIANT (orig_type);
4000 if (orig_type != t)
4001 {
4002 TYPE_MAIN_VARIANT (new_type) = t;
4003 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
4004 TYPE_NEXT_VARIANT (t) = new_type;
4005 }
4006 else
4007 {
4008 TYPE_MAIN_VARIANT (new_type) = new_type;
4009 TYPE_NEXT_VARIANT (new_type) = NULL;
4010 }
4011
4012 TREE_TYPE (fndecl) = new_type;
4013 DECL_VIRTUAL_P (fndecl) = 0;
4014 DECL_LANG_SPECIFIC (fndecl) = NULL;
4015 otypes.release ();
4016 oparms.release ();
4017 }
4018
4019 /* Modify actual arguments of the call statement STMT as indicated in
4020 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
4021 Otherwise it must contain the corresponding call graph edge. */
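/* E.g. (illustrative only, the member name is hypothetical): if ADJUSTMENTS
   reduce the aggregate argument of 'f (&s)' to a scalar member at offset 0
   passed by value, the call is rebuilt as 'f (s.member)'.  */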
4022
4023 void
4024 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4025 ipa_parm_adjustment_vec adjustments)
4026 {
4027 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4028 vec<tree> vargs;
4029 vec<tree, va_gc> **debug_args = NULL;
4030 gimple new_stmt;
4031 gimple_stmt_iterator gsi, prev_gsi;
4032 tree callee_decl;
4033 int i, len;
4034
4035 len = adjustments.length ();
4036 vargs.create (len);
4037 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4038 current_node->remove_stmt_references (stmt);
4039
4040 gsi = gsi_for_stmt (stmt);
4041 prev_gsi = gsi;
4042 gsi_prev (&prev_gsi);
4043 for (i = 0; i < len; i++)
4044 {
4045 struct ipa_parm_adjustment *adj;
4046
4047 adj = &adjustments[i];
4048
4049 if (adj->op == IPA_PARM_OP_COPY)
4050 {
4051 tree arg = gimple_call_arg (stmt, adj->base_index);
4052
4053 vargs.quick_push (arg);
4054 }
4055 else if (adj->op != IPA_PARM_OP_REMOVE)
4056 {
4057 tree expr, base, off;
4058 location_t loc;
4059 unsigned int deref_align = 0;
4060 bool deref_base = false;
4061
4062 /* When we create a new parameter out of the value of the old one, we
4063 can do the following kinds of transformations:
4064
4065 - A scalar passed by reference is converted to a scalar passed by
4066 value. (adj->by_ref is false and the type of the original
4067 actual argument is a pointer to a scalar).
4068
4069 - A part of an aggregate is passed instead of the whole aggregate.
4070 The part can be passed either by value or by reference, this is
4071 determined by value of adj->by_ref. Moreover, the code below
4072 handles both situations when the original aggregate is passed by
4073 value (its type is not a pointer) and when it is passed by
4074 reference (it is a pointer to an aggregate).
4075
4076 When the new argument is passed by reference (adj->by_ref is true)
4077 it must be a part of an aggregate and therefore we form it by
4078 simply taking the address of a reference inside the original
4079 aggregate. */
4080
4081 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4082 base = gimple_call_arg (stmt, adj->base_index);
4083 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4084 : EXPR_LOCATION (base);
4085
4086 if (TREE_CODE (base) != ADDR_EXPR
4087 && POINTER_TYPE_P (TREE_TYPE (base)))
4088 off = build_int_cst (adj->alias_ptr_type,
4089 adj->offset / BITS_PER_UNIT);
4090 else
4091 {
4092 HOST_WIDE_INT base_offset;
4093 tree prev_base;
4094 bool addrof;
4095
4096 if (TREE_CODE (base) == ADDR_EXPR)
4097 {
4098 base = TREE_OPERAND (base, 0);
4099 addrof = true;
4100 }
4101 else
4102 addrof = false;
4103 prev_base = base;
4104 base = get_addr_base_and_unit_offset (base, &base_offset);
4105 /* Aggregate arguments can have non-invariant addresses. */
4106 if (!base)
4107 {
4108 base = build_fold_addr_expr (prev_base);
4109 off = build_int_cst (adj->alias_ptr_type,
4110 adj->offset / BITS_PER_UNIT);
4111 }
4112 else if (TREE_CODE (base) == MEM_REF)
4113 {
4114 if (!addrof)
4115 {
4116 deref_base = true;
4117 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4118 }
4119 off = build_int_cst (adj->alias_ptr_type,
4120 base_offset
4121 + adj->offset / BITS_PER_UNIT);
4122 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4123 off);
4124 base = TREE_OPERAND (base, 0);
4125 }
4126 else
4127 {
4128 off = build_int_cst (adj->alias_ptr_type,
4129 base_offset
4130 + adj->offset / BITS_PER_UNIT);
4131 base = build_fold_addr_expr (base);
4132 }
4133 }
4134
4135 if (!adj->by_ref)
4136 {
4137 tree type = adj->type;
4138 unsigned int align;
4139 unsigned HOST_WIDE_INT misalign;
4140
4141 if (deref_base)
4142 {
4143 align = deref_align;
4144 misalign = 0;
4145 }
4146 else
4147 {
4148 get_pointer_alignment_1 (base, &align, &misalign);
4149 if (TYPE_ALIGN (type) > align)
4150 align = TYPE_ALIGN (type);
4151 }
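	  /* Fold the constant offset OFF into the misalignment and, if the
	     address is misaligned, reduce ALIGN to the largest power of two
	     that divides it.  */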
4152 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4153 * BITS_PER_UNIT);
4154 misalign = misalign & (align - 1);
4155 if (misalign != 0)
4156 align = (misalign & -misalign);
4157 if (align < TYPE_ALIGN (type))
4158 type = build_aligned_type (type, align);
4159 base = force_gimple_operand_gsi (&gsi, base,
4160 true, NULL, true, GSI_SAME_STMT);
4161 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4162 /* If EXPR is not a valid gimple call argument, emit
4163 a load into a temporary. */
4164 if (is_gimple_reg_type (TREE_TYPE (expr)))
4165 {
4166 gimple tem = gimple_build_assign (NULL_TREE, expr);
4167 if (gimple_in_ssa_p (cfun))
4168 {
4169 gimple_set_vuse (tem, gimple_vuse (stmt));
4170 expr = make_ssa_name (TREE_TYPE (expr), tem);
4171 }
4172 else
4173 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4174 gimple_assign_set_lhs (tem, expr);
4175 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4176 }
4177 }
4178 else
4179 {
4180 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4181 expr = build_fold_addr_expr (expr);
4182 expr = force_gimple_operand_gsi (&gsi, expr,
4183 true, NULL, true, GSI_SAME_STMT);
4184 }
4185 vargs.quick_push (expr);
4186 }
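      /* For arguments that were not plainly copied, bind the original value
	 to a DEBUG_EXPR_DECL recorded in the callee's debug arguments, so
	 that debug info can still refer to the removed or transformed
	 parameter.  */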
4187 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4188 {
4189 unsigned int ix;
4190 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4191 gimple def_temp;
4192
4193 arg = gimple_call_arg (stmt, adj->base_index);
4194 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4195 {
4196 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4197 continue;
4198 arg = fold_convert_loc (gimple_location (stmt),
4199 TREE_TYPE (origin), arg);
4200 }
4201 if (debug_args == NULL)
4202 debug_args = decl_debug_args_insert (callee_decl);
4203 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4204 if (ddecl == origin)
4205 {
4206 ddecl = (**debug_args)[ix + 1];
4207 break;
4208 }
4209 if (ddecl == NULL)
4210 {
4211 ddecl = make_node (DEBUG_EXPR_DECL);
4212 DECL_ARTIFICIAL (ddecl) = 1;
4213 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4214 DECL_MODE (ddecl) = DECL_MODE (origin);
4215
4216 vec_safe_push (*debug_args, origin);
4217 vec_safe_push (*debug_args, ddecl);
4218 }
4219 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4220 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4221 }
4222 }
4223
4224 if (dump_file && (dump_flags & TDF_DETAILS))
4225 {
4226 fprintf (dump_file, "replacing stmt:");
4227 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4228 }
4229
4230 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4231 vargs.release ();
4232 if (gimple_call_lhs (stmt))
4233 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4234
4235 gimple_set_block (new_stmt, gimple_block (stmt));
4236 if (gimple_has_location (stmt))
4237 gimple_set_location (new_stmt, gimple_location (stmt));
4238 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4239 gimple_call_copy_flags (new_stmt, stmt);
4240 if (gimple_in_ssa_p (cfun))
4241 {
4242 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4243 if (gimple_vdef (stmt))
4244 {
4245 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4246 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4247 }
4248 }
4249
4250 if (dump_file && (dump_flags & TDF_DETAILS))
4251 {
4252 fprintf (dump_file, "with stmt:");
4253 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4254 fprintf (dump_file, "\n");
4255 }
4256 gsi_replace (&gsi, new_stmt, true);
4257 if (cs)
4258 cs->set_call_stmt (new_stmt);
4259 do
4260 {
4261 current_node->record_stmt_references (gsi_stmt (gsi));
4262 gsi_prev (&gsi);
4263 }
4264 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4265 }
4266
4267 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4268 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4269 specifies whether the function should care about type incompatibility
4270 between the current and new expressions. If it is false, it will leave
4271 incompatibility issues to the caller. Return true iff the expression
4272 was modified. */
4273
4274 bool
4275 ipa_modify_expr (tree *expr, bool convert,
4276 ipa_parm_adjustment_vec adjustments)
4277 {
4278 struct ipa_parm_adjustment *cand
4279 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4280 if (!cand)
4281 return false;
4282
4283 tree src;
4284 if (cand->by_ref)
4285 src = build_simple_mem_ref (cand->new_decl);
4286 else
4287 src = cand->new_decl;
4288
4289 if (dump_file && (dump_flags & TDF_DETAILS))
4290 {
4291 fprintf (dump_file, "About to replace expr ");
4292 print_generic_expr (dump_file, *expr, 0);
4293 fprintf (dump_file, " with ");
4294 print_generic_expr (dump_file, src, 0);
4295 fprintf (dump_file, "\n");
4296 }
4297
4298 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4299 {
4300 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4301 *expr = vce;
4302 }
4303 else
4304 *expr = src;
4305 return true;
4306 }
4307
4308 /* If T is an SSA_NAME, return NULL if it is not a default def or
4309 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4310 the base variable is always returned, regardless of whether it is a
4311 default def. Return T if it is not an SSA_NAME. */
4312
4313 static tree
4314 get_ssa_base_param (tree t, bool ignore_default_def)
4315 {
4316 if (TREE_CODE (t) == SSA_NAME)
4317 {
4318 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4319 return SSA_NAME_VAR (t);
4320 else
4321 return NULL_TREE;
4322 }
4323 return t;
4324 }
4325
4326 /* Given an expression, return an adjustment entry specifying the
4327 transformation to be done on EXPR. If no suitable adjustment entry
4328 was found, returns NULL.
4329
4330 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4331 default def, otherwise bail on them.
4332
4333 If CONVERT is non-NULL, this function will set *CONVERT if the
4334 expression provided is a component reference. ADJUSTMENTS is the
4335 adjustments vector. */
4336
4337 ipa_parm_adjustment *
4338 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4339 ipa_parm_adjustment_vec adjustments,
4340 bool ignore_default_def)
4341 {
4342 if (TREE_CODE (**expr) == BIT_FIELD_REF
4343 || TREE_CODE (**expr) == IMAGPART_EXPR
4344 || TREE_CODE (**expr) == REALPART_EXPR)
4345 {
4346 *expr = &TREE_OPERAND (**expr, 0);
4347 if (convert)
4348 *convert = true;
4349 }
4350
4351 HOST_WIDE_INT offset, size, max_size;
4352 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4353 if (!base || size == -1 || max_size == -1)
4354 return NULL;
4355
4356 if (TREE_CODE (base) == MEM_REF)
4357 {
4358 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4359 base = TREE_OPERAND (base, 0);
4360 }
4361
4362 base = get_ssa_base_param (base, ignore_default_def);
4363 if (!base || TREE_CODE (base) != PARM_DECL)
4364 return NULL;
4365
4366 struct ipa_parm_adjustment *cand = NULL;
4367 unsigned int len = adjustments.length ();
4368 for (unsigned i = 0; i < len; i++)
4369 {
4370 struct ipa_parm_adjustment *adj = &adjustments[i];
4371
4372 if (adj->base == base
4373 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4374 {
4375 cand = adj;
4376 break;
4377 }
4378 }
4379
4380 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4381 return NULL;
4382 return cand;
4383 }
4384
4385 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4386
4387 static bool
4388 index_in_adjustments_multiple_times_p (int base_index,
4389 ipa_parm_adjustment_vec adjustments)
4390 {
4391 int i, len = adjustments.length ();
4392 bool one = false;
4393
4394 for (i = 0; i < len; i++)
4395 {
4396 struct ipa_parm_adjustment *adj;
4397 adj = &adjustments[i];
4398
4399 if (adj->base_index == base_index)
4400 {
4401 if (one)
4402 return true;
4403 else
4404 one = true;
4405 }
4406 }
4407 return false;
4408 }
4409
4410
4411 /* Return adjustments that should have the same effect on function parameters
4412 and call arguments as if they were first changed according to adjustments in
4413 INNER and then by adjustments in OUTER. */
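/* For example, if INNER replaces parameter 0 by its component at offset 32
   and OUTER then takes the piece at offset 8 of that new parameter, the
   combined adjustment describes offset 40 of the original parameter 0.  */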
4414
4415 ipa_parm_adjustment_vec
4416 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4417 ipa_parm_adjustment_vec outer)
4418 {
4419 int i, outlen = outer.length ();
4420 int inlen = inner.length ();
4421 int removals = 0;
4422 ipa_parm_adjustment_vec adjustments, tmp;
4423
4424 tmp.create (inlen);
4425 for (i = 0; i < inlen; i++)
4426 {
4427 struct ipa_parm_adjustment *n;
4428 n = &inner[i];
4429
4430 if (n->op == IPA_PARM_OP_REMOVE)
4431 removals++;
4432 else
4433 {
4434 /* FIXME: Handling of new arguments is not implemented yet. */
4435 gcc_assert (n->op != IPA_PARM_OP_NEW);
4436 tmp.quick_push (*n);
4437 }
4438 }
4439
4440 adjustments.create (outlen + removals);
4441 for (i = 0; i < outlen; i++)
4442 {
4443 struct ipa_parm_adjustment r;
4444 struct ipa_parm_adjustment *out = &outer[i];
4445 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4446
4447 memset (&r, 0, sizeof (r));
4448 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4449 if (out->op == IPA_PARM_OP_REMOVE)
4450 {
4451 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4452 {
4453 r.op = IPA_PARM_OP_REMOVE;
4454 adjustments.quick_push (r);
4455 }
4456 continue;
4457 }
4458 else
4459 {
4460 /* FIXME: Handling of new arguments is not implemented yet. */
4461 gcc_assert (out->op != IPA_PARM_OP_NEW);
4462 }
4463
4464 r.base_index = in->base_index;
4465 r.type = out->type;
4466
4467 /* FIXME: Create nonlocal value too. */
4468
4469 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4470 r.op = IPA_PARM_OP_COPY;
4471 else if (in->op == IPA_PARM_OP_COPY)
4472 r.offset = out->offset;
4473 else if (out->op == IPA_PARM_OP_COPY)
4474 r.offset = in->offset;
4475 else
4476 r.offset = in->offset + out->offset;
4477 adjustments.quick_push (r);
4478 }
4479
4480 for (i = 0; i < inlen; i++)
4481 {
4482 struct ipa_parm_adjustment *n = &inner[i];
4483
4484 if (n->op == IPA_PARM_OP_REMOVE)
4485 adjustments.quick_push (*n);
4486 }
4487
4488 tmp.release ();
4489 return adjustments;
4490 }
4491
4492 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
4493 friendly way, assuming they are meant to be applied to FNDECL. */
4494
4495 void
4496 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4497 tree fndecl)
4498 {
4499 int i, len = adjustments.length ();
4500 bool first = true;
4501 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4502
4503 fprintf (file, "IPA param adjustments: ");
4504 for (i = 0; i < len; i++)
4505 {
4506 struct ipa_parm_adjustment *adj;
4507 adj = &adjustments[i];
4508
4509 if (!first)
4510 fprintf (file, " ");
4511 else
4512 first = false;
4513
4514 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4515 print_generic_expr (file, parms[adj->base_index], 0);
4516 if (adj->base)
4517 {
4518 fprintf (file, ", base: ");
4519 print_generic_expr (file, adj->base, 0);
4520 }
4521 if (adj->new_decl)
4522 {
4523 fprintf (file, ", new_decl: ");
4524 print_generic_expr (file, adj->new_decl, 0);
4525 }
4526 if (adj->new_ssa_base)
4527 {
4528 fprintf (file, ", new_ssa_base: ");
4529 print_generic_expr (file, adj->new_ssa_base, 0);
4530 }
4531
4532 if (adj->op == IPA_PARM_OP_COPY)
4533 fprintf (file, ", copy_param");
4534 else if (adj->op == IPA_PARM_OP_REMOVE)
4535 fprintf (file, ", remove_param");
4536 else
4537 fprintf (file, ", offset %li", (long) adj->offset);
4538 if (adj->by_ref)
4539 fprintf (file, ", by_ref");
4540 print_node_brief (file, ", type: ", adj->type, 0);
4541 fprintf (file, "\n");
4542 }
4543 parms.release ();
4544 }
4545
4546 /* Dump the linked list of aggregate replacement values AV to F. */
4547
4548 void
4549 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4550 {
4551 bool comma = false;
4552 fprintf (f, " Aggregate replacements:");
4553 for (; av; av = av->next)
4554 {
4555 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4556 av->index, av->offset);
4557 print_generic_expr (f, av->value, 0);
4558 comma = true;
4559 }
4560 fprintf (f, "\n");
4561 }
4562
4563 /* Stream out jump function JUMP_FUNC to OB. */
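/* The layout written here must be kept in sync with ipa_read_jump_function
   below.  */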
4564
4565 static void
4566 ipa_write_jump_function (struct output_block *ob,
4567 struct ipa_jump_func *jump_func)
4568 {
4569 struct ipa_agg_jf_item *item;
4570 struct bitpack_d bp;
4571 int i, count;
4572
4573 streamer_write_uhwi (ob, jump_func->type);
4574 switch (jump_func->type)
4575 {
4576 case IPA_JF_UNKNOWN:
4577 break;
4578 case IPA_JF_KNOWN_TYPE:
4579 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4580 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4581 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4582 break;
4583 case IPA_JF_CONST:
4584 gcc_assert (
4585 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4586 stream_write_tree (ob, jump_func->value.constant.value, true);
4587 break;
4588 case IPA_JF_PASS_THROUGH:
4589 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4590 if (jump_func->value.pass_through.operation == NOP_EXPR)
4591 {
4592 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4593 bp = bitpack_create (ob->main_stream);
4594 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4595 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4596 streamer_write_bitpack (&bp);
4597 }
4598 else
4599 {
4600 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4601 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4602 }
4603 break;
4604 case IPA_JF_ANCESTOR:
4605 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4606 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4607 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4608 bp = bitpack_create (ob->main_stream);
4609 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4610 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4611 streamer_write_bitpack (&bp);
4612 break;
4613 }
4614
4615 count = vec_safe_length (jump_func->agg.items);
4616 streamer_write_uhwi (ob, count);
4617 if (count)
4618 {
4619 bp = bitpack_create (ob->main_stream);
4620 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4621 streamer_write_bitpack (&bp);
4622 }
4623
4624 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4625 {
4626 streamer_write_uhwi (ob, item->offset);
4627 stream_write_tree (ob, item->value, true);
4628 }
4629 }
4630
4631 /* Read in jump function JUMP_FUNC from IB. */
4632
4633 static void
4634 ipa_read_jump_function (struct lto_input_block *ib,
4635 struct ipa_jump_func *jump_func,
4636 struct cgraph_edge *cs,
4637 struct data_in *data_in)
4638 {
4639 enum jump_func_type jftype;
4640 enum tree_code operation;
4641 int i, count;
4642
4643 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4644 switch (jftype)
4645 {
4646 case IPA_JF_UNKNOWN:
4647 jump_func->type = IPA_JF_UNKNOWN;
4648 break;
4649 case IPA_JF_KNOWN_TYPE:
4650 {
4651 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4652 tree base_type = stream_read_tree (ib, data_in);
4653 tree component_type = stream_read_tree (ib, data_in);
4654
4655 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4656 break;
4657 }
4658 case IPA_JF_CONST:
4659 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4660 break;
4661 case IPA_JF_PASS_THROUGH:
4662 operation = (enum tree_code) streamer_read_uhwi (ib);
4663 if (operation == NOP_EXPR)
4664 {
4665 int formal_id = streamer_read_uhwi (ib);
4666 struct bitpack_d bp = streamer_read_bitpack (ib);
4667 bool agg_preserved = bp_unpack_value (&bp, 1);
4668 bool type_preserved = bp_unpack_value (&bp, 1);
4669 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4670 type_preserved);
4671 }
4672 else
4673 {
4674 tree operand = stream_read_tree (ib, data_in);
4675 int formal_id = streamer_read_uhwi (ib);
4676 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4677 operation);
4678 }
4679 break;
4680 case IPA_JF_ANCESTOR:
4681 {
4682 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4683 tree type = stream_read_tree (ib, data_in);
4684 int formal_id = streamer_read_uhwi (ib);
4685 struct bitpack_d bp = streamer_read_bitpack (ib);
4686 bool agg_preserved = bp_unpack_value (&bp, 1);
4687 bool type_preserved = bp_unpack_value (&bp, 1);
4688
4689 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4690 type_preserved);
4691 break;
4692 }
4693 }
4694
4695 count = streamer_read_uhwi (ib);
4696 vec_alloc (jump_func->agg.items, count);
4697 if (count)
4698 {
4699 struct bitpack_d bp = streamer_read_bitpack (ib);
4700 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4701 }
4702 for (i = 0; i < count; i++)
4703 {
4704 struct ipa_agg_jf_item item;
4705 item.offset = streamer_read_uhwi (ib);
4706 item.value = stream_read_tree (ib, data_in);
4707 jump_func->agg.items->quick_push (item);
4708 }
4709 }
4710
4711 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4712 relevant to indirect inlining to OB. */
4713
4714 static void
4715 ipa_write_indirect_edge_info (struct output_block *ob,
4716 struct cgraph_edge *cs)
4717 {
4718 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4719 struct bitpack_d bp;
4720
4721 streamer_write_hwi (ob, ii->param_index);
4722 streamer_write_hwi (ob, ii->offset);
4723 bp = bitpack_create (ob->main_stream);
4724 bp_pack_value (&bp, ii->polymorphic, 1);
4725 bp_pack_value (&bp, ii->agg_contents, 1);
4726 bp_pack_value (&bp, ii->member_ptr, 1);
4727 bp_pack_value (&bp, ii->by_ref, 1);
4728 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4729 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4730 bp_pack_value (&bp, ii->speculative_maybe_derived_type, 1);
4731 streamer_write_bitpack (&bp);
4732
4733 if (ii->polymorphic)
4734 {
4735 streamer_write_hwi (ob, ii->otr_token);
4736 stream_write_tree (ob, ii->otr_type, true);
4737 stream_write_tree (ob, ii->outer_type, true);
4738 stream_write_tree (ob, ii->speculative_outer_type, true);
4739 if (ii->speculative_outer_type)
4740 streamer_write_hwi (ob, ii->speculative_offset);
4741 }
4742 }
4743
4744 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4745 relevant to indirect inlining from IB. */
4746
4747 static void
4748 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4749 struct data_in *data_in ATTRIBUTE_UNUSED,
4750 struct cgraph_edge *cs)
4751 {
4752 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4753 struct bitpack_d bp;
4754
4755 ii->param_index = (int) streamer_read_hwi (ib);
4756 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4757 bp = streamer_read_bitpack (ib);
4758 ii->polymorphic = bp_unpack_value (&bp, 1);
4759 ii->agg_contents = bp_unpack_value (&bp, 1);
4760 ii->member_ptr = bp_unpack_value (&bp, 1);
4761 ii->by_ref = bp_unpack_value (&bp, 1);
4762 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4763 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4764 ii->speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
4765 if (ii->polymorphic)
4766 {
4767 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4768 ii->otr_type = stream_read_tree (ib, data_in);
4769 ii->outer_type = stream_read_tree (ib, data_in);
4770 ii->speculative_outer_type = stream_read_tree (ib, data_in);
4771 if (ii->speculative_outer_type)
4772 ii->speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4773 }
4774 }
4775
4776 /* Stream out NODE info to OB. */
4777
4778 static void
4779 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4780 {
4781 int node_ref;
4782 lto_symtab_encoder_t encoder;
4783 struct ipa_node_params *info = IPA_NODE_REF (node);
4784 int j;
4785 struct cgraph_edge *e;
4786 struct bitpack_d bp;
4787
4788 encoder = ob->decl_state->symtab_node_encoder;
4789 node_ref = lto_symtab_encoder_encode (encoder, node);
4790 streamer_write_uhwi (ob, node_ref);
4791
4792 streamer_write_uhwi (ob, ipa_get_param_count (info));
4793 for (j = 0; j < ipa_get_param_count (info); j++)
4794 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4795 bp = bitpack_create (ob->main_stream);
4796 gcc_assert (info->analysis_done
4797 || ipa_get_param_count (info) == 0);
4798 gcc_assert (!info->node_enqueued);
4799 gcc_assert (!info->ipcp_orig_node);
4800 for (j = 0; j < ipa_get_param_count (info); j++)
4801 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4802 streamer_write_bitpack (&bp);
4803 for (j = 0; j < ipa_get_param_count (info); j++)
4804 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4805 for (e = node->callees; e; e = e->next_callee)
4806 {
4807 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4808
4809 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4810 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4811 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4812 }
4813 for (e = node->indirect_calls; e; e = e->next_callee)
4814 {
4815 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4816
4817 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4818 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4819 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4820 ipa_write_indirect_edge_info (ob, e);
4821 }
4822 }
4823
4824 /* Stream in NODE info from IB. */
4825
4826 static void
4827 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4828 struct data_in *data_in)
4829 {
4830 struct ipa_node_params *info = IPA_NODE_REF (node);
4831 int k;
4832 struct cgraph_edge *e;
4833 struct bitpack_d bp;
4834
4835 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4836
4837 for (k = 0; k < ipa_get_param_count (info); k++)
4838 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4839
4840 bp = streamer_read_bitpack (ib);
4841 if (ipa_get_param_count (info) != 0)
4842 info->analysis_done = true;
4843 info->node_enqueued = false;
4844 for (k = 0; k < ipa_get_param_count (info); k++)
4845 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4846 for (k = 0; k < ipa_get_param_count (info); k++)
4847 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4848 for (e = node->callees; e; e = e->next_callee)
4849 {
4850 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4851 int count = streamer_read_uhwi (ib);
4852
4853 if (!count)
4854 continue;
4855 vec_safe_grow_cleared (args->jump_functions, count);
4856
4857 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4858 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4859 data_in);
4860 }
4861 for (e = node->indirect_calls; e; e = e->next_callee)
4862 {
4863 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4864 int count = streamer_read_uhwi (ib);
4865
4866 if (count)
4867 {
4868 vec_safe_grow_cleared (args->jump_functions, count);
4869 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4870 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4871 data_in);
4872 }
4873 ipa_read_indirect_edge_info (ib, data_in, e);
4874 }
4875 }
4876
4877 /* Write jump functions of all analyzed functions in the current partition. */
4878
4879 void
4880 ipa_prop_write_jump_functions (void)
4881 {
4882 struct cgraph_node *node;
4883 struct output_block *ob;
4884 unsigned int count = 0;
4885 lto_symtab_encoder_iterator lsei;
4886 lto_symtab_encoder_t encoder;
4887
4889 if (!ipa_node_params_vector.exists ())
4890 return;
4891
4892 ob = create_output_block (LTO_section_jump_functions);
4893 encoder = ob->decl_state->symtab_node_encoder;
4894 ob->symbol = NULL;
4895 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4896 lsei_next_function_in_partition (&lsei))
4897 {
4898 node = lsei_cgraph_node (lsei);
4899 if (node->has_gimple_body_p ()
4900 && IPA_NODE_REF (node) != NULL)
4901 count++;
4902 }
4903
4904 streamer_write_uhwi (ob, count);
4905
4906 /* Process all of the functions. */
4907 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4908 lsei_next_function_in_partition (&lsei))
4909 {
4910 node = lsei_cgraph_node (lsei);
4911 if (node->has_gimple_body_p ()
4912 && IPA_NODE_REF (node) != NULL)
4913 ipa_write_node_info (ob, node);
4914 }
4915 streamer_write_char_stream (ob->main_stream, 0);
4916 produce_asm (ob, NULL);
4917 destroy_output_block (ob);
4918 }
4919
4920 /* Read a jump-functions section in file FILE_DATA of length LEN with data DATA. */
4921
4922 static void
4923 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4924 size_t len)
4925 {
4926 const struct lto_function_header *header =
4927 (const struct lto_function_header *) data;
4928 const int cfg_offset = sizeof (struct lto_function_header);
4929 const int main_offset = cfg_offset + header->cfg_size;
4930 const int string_offset = main_offset + header->main_size;
4931 struct data_in *data_in;
4932 unsigned int i;
4933 unsigned int count;
4934
4935 lto_input_block ib_main ((const char *) data + main_offset,
4936 header->main_size);
4937
4938 data_in =
4939 lto_data_in_create (file_data, (const char *) data + string_offset,
4940 header->string_size, vNULL);
4941 count = streamer_read_uhwi (&ib_main);
4942
4943 for (i = 0; i < count; i++)
4944 {
4945 unsigned int index;
4946 struct cgraph_node *node;
4947 lto_symtab_encoder_t encoder;
4948
4949 index = streamer_read_uhwi (&ib_main);
4950 encoder = file_data->symtab_node_encoder;
4951 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4952 index));
4953 gcc_assert (node->definition);
4954 ipa_read_node_info (&ib_main, node, data_in);
4955 }
4956 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4957 len);
4958 lto_data_in_delete (data_in);
4959 }
4960
4961 /* Read IPA-CP jump functions. */
4962
4963 void
4964 ipa_prop_read_jump_functions (void)
4965 {
4966 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4967 struct lto_file_decl_data *file_data;
4968 unsigned int j = 0;
4969
4970 ipa_check_create_node_params ();
4971 ipa_check_create_edge_args ();
4972 ipa_register_cgraph_hooks ();
4973
4974 while ((file_data = file_data_vec[j++]))
4975 {
4976 size_t len;
4977 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4978
4979 if (data)
4980 ipa_prop_read_section (file_data, data, len);
4981 }
4982 }
4983
4984 /* After merging units, we can get mismatches in argument counts and decl
4985 merging might have rendered parameter lists obsolete. Make sure the
4986 necessary per-node and per-edge data structures exist. */
4987
4988 void
4989 ipa_update_after_lto_read (void)
4990 {
4991 ipa_check_create_node_params ();
4992 ipa_check_create_edge_args ();
4993 }
4994
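/* Stream out the aggregate value replacement chain for NODE to OB.  */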
4995 void
4996 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4997 {
4998 int node_ref;
4999 unsigned int count = 0;
5000 lto_symtab_encoder_t encoder;
5001 struct ipa_agg_replacement_value *aggvals, *av;
5002
5003 aggvals = ipa_get_agg_replacements_for_node (node);
5004 encoder = ob->decl_state->symtab_node_encoder;
5005 node_ref = lto_symtab_encoder_encode (encoder, node);
5006 streamer_write_uhwi (ob, node_ref);
5007
5008 for (av = aggvals; av; av = av->next)
5009 count++;
5010 streamer_write_uhwi (ob, count);
5011
5012 for (av = aggvals; av; av = av->next)
5013 {
5014 struct bitpack_d bp;
5015
5016 streamer_write_uhwi (ob, av->offset);
5017 streamer_write_uhwi (ob, av->index);
5018 stream_write_tree (ob, av->value, true);
5019
5020 bp = bitpack_create (ob->main_stream);
5021 bp_pack_value (&bp, av->by_ref, 1);
5022 streamer_write_bitpack (&bp);
5023 }
5024 }
5025
5026 /* Stream in the aggregate value replacement chain for NODE from IB. */
5027
5028 static void
5029 read_agg_replacement_chain (struct lto_input_block *ib,
5030 struct cgraph_node *node,
5031 struct data_in *data_in)
5032 {
5033 struct ipa_agg_replacement_value *aggvals = NULL;
5034 unsigned int count, i;
5035
5036 count = streamer_read_uhwi (ib);
5037 for (i = 0; i < count; i++)
5038 {
5039 struct ipa_agg_replacement_value *av;
5040 struct bitpack_d bp;
5041
5042 av = ggc_alloc<ipa_agg_replacement_value> ();
5043 av->offset = streamer_read_uhwi (ib);
5044 av->index = streamer_read_uhwi (ib);
5045 av->value = stream_read_tree (ib, data_in);
5046 bp = streamer_read_bitpack (ib);
5047 av->by_ref = bp_unpack_value (&bp, 1);
5048 av->next = aggvals;
5049 aggvals = av;
5050 }
5051 ipa_set_node_agg_value_chain (node, aggvals);
5052 }
5053
5054 /* Write the aggregate replacement chains of all nodes in the current partition. */
5055
5056 void
5057 ipa_prop_write_all_agg_replacement (void)
5058 {
5059 struct cgraph_node *node;
5060 struct output_block *ob;
5061 unsigned int count = 0;
5062 lto_symtab_encoder_iterator lsei;
5063 lto_symtab_encoder_t encoder;
5064
5065 if (!ipa_node_agg_replacements)
5066 return;
5067
5068 ob = create_output_block (LTO_section_ipcp_transform);
5069 encoder = ob->decl_state->symtab_node_encoder;
5070 ob->symbol = NULL;
5071 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5072 lsei_next_function_in_partition (&lsei))
5073 {
5074 node = lsei_cgraph_node (lsei);
5075 if (node->has_gimple_body_p ()
5076 && ipa_get_agg_replacements_for_node (node) != NULL)
5077 count++;
5078 }
5079
5080 streamer_write_uhwi (ob, count);
5081
5082 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5083 lsei_next_function_in_partition (&lsei))
5084 {
5085 node = lsei_cgraph_node (lsei);
5086 if (node->has_gimple_body_p ()
5087 && ipa_get_agg_replacements_for_node (node) != NULL)
5088 write_agg_replacement_chain (ob, node);
5089 }
5090 streamer_write_char_stream (ob->main_stream, 0);
5091 produce_asm (ob, NULL);
5092 destroy_output_block (ob);
5093 }
5094
5095 /* Read replacements section in file FILE_DATA of length LEN with data
5096 DATA. */
5097
5098 static void
5099 read_replacements_section (struct lto_file_decl_data *file_data,
5100 const char *data,
5101 size_t len)
5102 {
5103 const struct lto_function_header *header =
5104 (const struct lto_function_header *) data;
5105 const int cfg_offset = sizeof (struct lto_function_header);
5106 const int main_offset = cfg_offset + header->cfg_size;
5107 const int string_offset = main_offset + header->main_size;
5108 struct data_in *data_in;
5109 unsigned int i;
5110 unsigned int count;
5111
5112 lto_input_block ib_main ((const char *) data + main_offset,
5113 header->main_size);
5114
5115 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5116 header->string_size, vNULL);
5117 count = streamer_read_uhwi (&ib_main);
5118
5119 for (i = 0; i < count; i++)
5120 {
5121 unsigned int index;
5122 struct cgraph_node *node;
5123 lto_symtab_encoder_t encoder;
5124
5125 index = streamer_read_uhwi (&ib_main);
5126 encoder = file_data->symtab_node_encoder;
5127 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5128 index));
5129 gcc_assert (node->definition);
5130 read_agg_replacement_chain (&ib_main, node, data_in);
5131 }
5132 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5133 len);
5134 lto_data_in_delete (data_in);
5135 }
5136
5137 /* Read IPA-CP aggregate replacements. */
5138
5139 void
5140 ipa_prop_read_all_agg_replacement (void)
5141 {
5142 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5143 struct lto_file_decl_data *file_data;
5144 unsigned int j = 0;
5145
5146 while ((file_data = file_data_vec[j++]))
5147 {
5148 size_t len;
5149 const char *data = lto_get_section_data (file_data,
5150 LTO_section_ipcp_transform,
5151 NULL, &len);
5152 if (data)
5153 read_replacements_section (file_data, data, len);
5154 }
5155 }
5156
5157 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5158 NODE. */
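/* E.g. if the second of three parameters is skipped, indices {0, 2} used in
   AGGVAL are remapped to {0, 1}.  */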
5159
5160 static void
5161 adjust_agg_replacement_values (struct cgraph_node *node,
5162 struct ipa_agg_replacement_value *aggval)
5163 {
5164 struct ipa_agg_replacement_value *v;
5165 int i, c = 0, d = 0, *adj;
5166
5167 if (!node->clone.combined_args_to_skip)
5168 return;
5169
5170 for (v = aggval; v; v = v->next)
5171 {
5172 gcc_assert (v->index >= 0);
5173 if (c < v->index)
5174 c = v->index;
5175 }
5176 c++;
5177
5178 adj = XALLOCAVEC (int, c);
5179 for (i = 0; i < c; i++)
5180 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5181 {
5182 adj[i] = -1;
5183 d++;
5184 }
5185 else
5186 adj[i] = i - d;
5187
5188 for (v = aggval; v; v = v->next)
5189 v->index = adj[v->index];
5190 }
5191
5192 /* Dominator walker driving the ipcp modification phase. */
5193
5194 class ipcp_modif_dom_walker : public dom_walker
5195 {
5196 public:
5197 ipcp_modif_dom_walker (struct func_body_info *fbi,
5198 vec<ipa_param_descriptor> descs,
5199 struct ipa_agg_replacement_value *av,
5200 bool *sc, bool *cc)
5201 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5202 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5203
5204 virtual void before_dom_children (basic_block);
5205
5206 private:
5207 struct func_body_info *m_fbi;
5208 vec<ipa_param_descriptor> m_descriptors;
5209 struct ipa_agg_replacement_value *m_aggval;
5210 bool *m_something_changed, *m_cfg_changed;
5211 };
5212
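/* Scan the statements of BB and replace loads from parameter aggregates with
   the constants recorded in m_aggval whenever a replacement with a matching
   index, offset, size and by_ref kind is known.  */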
5213 void
5214 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5215 {
5216 gimple_stmt_iterator gsi;
5217 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5218 {
5219 struct ipa_agg_replacement_value *v;
5220 gimple stmt = gsi_stmt (gsi);
5221 tree rhs, val, t;
5222 HOST_WIDE_INT offset, size;
5223 int index;
5224 bool by_ref, vce;
5225
5226 if (!gimple_assign_load_p (stmt))
5227 continue;
5228 rhs = gimple_assign_rhs1 (stmt);
5229 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5230 continue;
5231
5232 vce = false;
5233 t = rhs;
5234 while (handled_component_p (t))
5235 {
5236 /* A V_C_E can do things like convert an array of integers to one
5237 bigger integer and similar things that we do not handle below. */
5238 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5239 {
5240 vce = true;
5241 break;
5242 }
5243 t = TREE_OPERAND (t, 0);
5244 }
5245 if (vce)
5246 continue;
5247
5248 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5249 &offset, &size, &by_ref))
5250 continue;
5251 for (v = m_aggval; v; v = v->next)
5252 if (v->index == index
5253 && v->offset == offset)
5254 break;
5255 if (!v
5256 || v->by_ref != by_ref
5257 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5258 continue;
5259
5260 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5261 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5262 {
5263 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5264 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5265 else if (TYPE_SIZE (TREE_TYPE (rhs))
5266 == TYPE_SIZE (TREE_TYPE (v->value)))
5267 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5268 else
5269 {
5270 if (dump_file)
5271 {
5272 fprintf (dump_file, " const ");
5273 print_generic_expr (dump_file, v->value, 0);
5274 fprintf (dump_file, " can't be converted to type of ");
5275 print_generic_expr (dump_file, rhs, 0);
5276 fprintf (dump_file, "\n");
5277 }
5278 continue;
5279 }
5280 }
5281 else
5282 val = v->value;
5283
5284 if (dump_file && (dump_flags & TDF_DETAILS))
5285 {
5286 fprintf (dump_file, "Modifying stmt:\n ");
5287 print_gimple_stmt (dump_file, stmt, 0, 0);
5288 }
5289 gimple_assign_set_rhs_from_tree (&gsi, val);
5290 update_stmt (stmt);
5291
5292 if (dump_file && (dump_flags & TDF_DETAILS))
5293 {
5294 fprintf (dump_file, "into:\n ");
5295 print_gimple_stmt (dump_file, stmt, 0, 0);
5296 fprintf (dump_file, "\n");
5297 }
5298
5299 *m_something_changed = true;
5300 if (maybe_clean_eh_stmt (stmt)
5301 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5302 *m_cfg_changed = true;
5303 }
5305 }
5306
5307 /* IPCP transformation phase doing propagation of aggregate values. */
5308
5309 unsigned int
5310 ipcp_transform_function (struct cgraph_node *node)
5311 {
5312 vec<ipa_param_descriptor> descriptors = vNULL;
5313 struct func_body_info fbi;
5314 struct ipa_agg_replacement_value *aggval;
5315 int param_count;
5316 bool cfg_changed = false, something_changed = false;
5317
5318 gcc_checking_assert (cfun);
5319 gcc_checking_assert (current_function_decl);
5320
5321 if (dump_file)
5322 fprintf (dump_file, "Modification phase of node %s/%i\n",
5323 node->name (), node->order);
5324
5325 aggval = ipa_get_agg_replacements_for_node (node);
5326 if (!aggval)
5327 return 0;
5328 param_count = count_formal_params (node->decl);
5329 if (param_count == 0)
5330 return 0;
5331 adjust_agg_replacement_values (node, aggval);
5332 if (dump_file)
5333 ipa_dump_agg_replacement_values (dump_file, aggval);
5334
5335 fbi.node = node;
5336 fbi.info = NULL;
5337 fbi.bb_infos = vNULL;
5338 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5339 fbi.param_count = param_count;
5340 fbi.aa_walked = 0;
5341
5342 descriptors.safe_grow_cleared (param_count);
5343 ipa_populate_param_decls (node, descriptors);
5344 calculate_dominance_info (CDI_DOMINATORS);
5345 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5346 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5347
5348 int i;
5349 struct ipa_bb_info *bi;
5350 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5351 free_ipa_bb_info (bi);
5352 fbi.bb_infos.release ();
5353 free_dominance_info (CDI_DOMINATORS);
5354 (*ipa_node_agg_replacements)[node->uid] = NULL;
5355 descriptors.release ();
5356
5357 if (!something_changed)
5358 return 0;
5359 else if (cfg_changed)
5360 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5361 else
5362 return TODO_update_ssa_only_virtuals;
5363 }