/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at the
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge_p> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at the
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
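
/* An illustrative example (not from the sources): a function declared as

     __attribute__ ((optimize ("O0"))) int foo (int i);

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with x_optimize equal to zero,
   so the predicate above returns true and IPA-CP does not analyze foo.  */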

/* Return index of the formal whose tree is PTREE in the function described by
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the textual representation of the Ith formal parameter of the function
   corresponding to INFO into FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}
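
/* A hypothetical fragment of dump output that the function above could
   produce for a call passing a constant and an unmodified formal (the exact
   operation names come from get_tree_code_name):

     param 0: CONST: 4
     param 1: PASS THROUGH: 0, op nop_expr, agg_preserved  */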


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
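
/* For illustration (hypothetical C++ input): given

     struct A { virtual void f (); };
     struct B : public A { virtual void f (); };
     void callee (A *);
     void caller () { B b; callee (&b); }

   the argument &b can be described by a known type jump function with
   base_type B, offset 0 and component_type A, which later allows virtual
   calls through the parameter in callee to be devirtualized to B::f.  */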

/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
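
/* An illustrative example (hypothetical types): in

     struct A { ... };
     struct B { int x; struct A a; };
     void callee (struct A *);
     void caller (struct B *b) { callee (&b->a); }

   the argument &b->a is described by an ancestor jump function of formal 0
   whose OFFSET is the bit position of the field a within struct B.  */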

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  /* TODO: Skip clobbers, doing so triggers problem in PR60306.  */
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
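
/* For instance (a sketch of typical gimple, not taken from the sources), a
   constructor storing the new vtable pointer looks like

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   i.e. a COMPONENT_REF whose field is DECL_VIRTUAL_P, for which the function
   above returns true.  A call statement, or with strict aliasing a store to
   a non-pointer scalar field, makes it return false.  */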

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL_TREE;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the invocation of CALL.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we do not have
     an easy way to do this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}
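
/* For example (illustrative): in a method

     void A::set (int v) { this->x = v; }

   the THIS argument is the first PARM_DECL of a METHOD_TYPE and set is
   neither a constructor nor a destructor, so unless the call site sits
   inside an inlined ctor/dtor block, the function above returns false and
   the dynamic type of *this can be assumed stable up to CALL.  */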

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}
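
/* An illustrative scenario: once a walk performed in some basic block has
   set parm_modified for parameter INDEX, every later query made in a block
   dominated by it copies that status from the dominator via
   find_dominating_aa_status and does not consult the alias oracle again.  */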

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
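
/* For instance (sketch): when a formal parameter a is not a gimple
   register, the load

     a.0_2 = a;

   reads directly from the PARM_DECL a; if no aliasing store can modify a
   before this statement, the function above returns the index of a.  */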

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (but the
   former can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, but without the func_body_info pointer and
   the size output, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  FBI contains the information that is only
   needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
                         index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
                                    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
         we may work out the actual type by walking memory writes.  */
      && (!is_global_var (base)
          && detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
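
/* Illustration (hypothetical C++ input): a pointer-to-member-function type
   such as

     void (A::*pmf) ();

   is lowered to a RECORD_TYPE with exactly two fields, a pointer to the
   method (__pfn) and an integral adjustment (__delta), which is the shape
   that the predicate above recognizes.  */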

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
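
/* E.g. (sketch): given the gimple copy chain

     b_3 = a_2;
     c_4 = b_3;

   a query for c_4 walks back through the single-operand assignments and
   yields a_2 (or whatever non-copy value the chain eventually reaches).  */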

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}
1652
1653 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1654 in ARG is filled in with constant values. ARG can either be an aggregate
1655 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1656 aggregate. JFUNC is the jump function into which the constants are
1657 subsequently stored. */
1658
1659 static void
1660 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1661 struct ipa_jump_func *jfunc)
1662 {
1663 struct ipa_known_agg_contents_list *list = NULL;
1664 int item_count = 0, const_count = 0;
1665 HOST_WIDE_INT arg_offset, arg_size;
1666 gimple_stmt_iterator gsi;
1667 tree arg_base;
1668 bool check_ref, by_ref;
1669 ao_ref r;
1670
1671 /* The function operates in three stages. First, we prepare check_ref, r,
1672 arg_base and arg_offset based on what is actually passed as an actual
1673 argument. */
1674
1675 if (POINTER_TYPE_P (arg_type))
1676 {
1677 by_ref = true;
1678 if (TREE_CODE (arg) == SSA_NAME)
1679 {
1680 tree type_size;
1681 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1682 return;
1683 check_ref = true;
1684 arg_base = arg;
1685 arg_offset = 0;
1686 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1687 arg_size = tree_to_uhwi (type_size);
1688 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1689 }
1690 else if (TREE_CODE (arg) == ADDR_EXPR)
1691 {
1692 HOST_WIDE_INT arg_max_size;
1693
1694 arg = TREE_OPERAND (arg, 0);
1695 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1696 &arg_max_size);
1697 if (arg_max_size == -1
1698 || arg_max_size != arg_size
1699 || arg_offset < 0)
1700 return;
1701 if (DECL_P (arg_base))
1702 {
1703 check_ref = false;
1704 ao_ref_init (&r, arg_base);
1705 }
1706 else
1707 return;
1708 }
1709 else
1710 return;
1711 }
1712 else
1713 {
1714 HOST_WIDE_INT arg_max_size;
1715
1716 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1717
1718 by_ref = false;
1719 check_ref = false;
1720 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1721 &arg_max_size);
1722 if (arg_max_size == -1
1723 || arg_max_size != arg_size
1724 || arg_offset < 0)
1725 return;
1726
1727 ao_ref_init (&r, arg);
1728 }
1729
1730 /* Second stage walks back the BB, looks at individual statements and as long
1731 as it is confident of how the statements affect contents of the
1732 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1733 describing it. */
1734 gsi = gsi_for_stmt (call);
1735 gsi_prev (&gsi);
1736 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1737 {
1738 struct ipa_known_agg_contents_list *n, **p;
1739 gimple stmt = gsi_stmt (gsi);
1740 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1741 tree lhs, rhs, lhs_base;
1742
1743 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1744 continue;
1745 if (!gimple_assign_single_p (stmt))
1746 break;
1747
1748 lhs = gimple_assign_lhs (stmt);
1749 rhs = gimple_assign_rhs1 (stmt);
1750 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1751 || TREE_CODE (lhs) == BIT_FIELD_REF
1752 || contains_bitfld_component_ref_p (lhs))
1753 break;
1754
1755 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1756 &lhs_max_size);
1757 if (lhs_max_size == -1
1758 || lhs_max_size != lhs_size)
1759 break;
1760
1761 if (check_ref)
1762 {
1763 if (TREE_CODE (lhs_base) != MEM_REF
1764 || TREE_OPERAND (lhs_base, 0) != arg_base
1765 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1766 break;
1767 }
1768 else if (lhs_base != arg_base)
1769 {
1770 if (DECL_P (lhs_base))
1771 continue;
1772 else
1773 break;
1774 }
1775
1776 bool already_there = false;
1777 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1778 &already_there);
1779 if (!p)
1780 break;
1781 if (already_there)
1782 continue;
1783
1784 rhs = get_ssa_def_if_simple_copy (rhs);
1785 n = XALLOCA (struct ipa_known_agg_contents_list);
1786 n->size = lhs_size;
1787 n->offset = lhs_offset;
1788 if (is_gimple_ip_invariant (rhs))
1789 {
1790 n->constant = rhs;
1791 const_count++;
1792 }
1793 else
1794 n->constant = NULL_TREE;
1795 n->next = *p;
1796 *p = n;
1797
1798 item_count++;
1799 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1800 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1801 break;
1802 }
1803
1804 /* The third stage just goes over the list and creates an appropriate vector
1805 of ipa_agg_jf_item structures out of it, of course only if there are
1806 any known constants to begin with. */
1807
1808 if (const_count)
1809 {
1810 jfunc->agg.by_ref = by_ref;
1811 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1812 }
1813 }
1814
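/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, preferring the function type and falling back to the callee's
   DECL_ARGUMENTS, or return NULL if the type cannot be determined. */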
1815 static tree
1816 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1817 {
1818 int n;
1819 tree type = (e->callee
1820 ? TREE_TYPE (e->callee->decl)
1821 : gimple_call_fntype (e->call_stmt));
1822 tree t = TYPE_ARG_TYPES (type);
1823
1824 for (n = 0; n < i; n++)
1825 {
1826 if (!t)
1827 break;
1828 t = TREE_CHAIN (t);
1829 }
1830 if (t)
1831 return TREE_VALUE (t);
1832 if (!e->callee)
1833 return NULL;
1834 t = DECL_ARGUMENTS (e->callee->decl);
1835 for (n = 0; n < i; n++)
1836 {
1837 if (!t)
1838 return NULL;
1839 t = TREE_CHAIN (t);
1840 }
1841 if (t)
1842 return TREE_TYPE (t);
1843 return NULL;
1844 }
1845
1846 /* Compute jump functions for all arguments of callsite CS and insert the
1847 information into the jump_functions array in the ipa_edge_args corresponding
1848 to this callsite. */
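/* As a hypothetical illustration, for a call bar (7, i, i + 4) in which i is
   an unmodified parameter of the caller, the first argument would typically
   yield an IPA_JF_CONST jump function, the second a simple
   IPA_JF_PASS_THROUGH, and the third an arithmetic pass-through recording
   PLUS_EXPR with the constant operand 4. */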
1849
1850 static void
1851 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1852 struct cgraph_edge *cs)
1853 {
1854 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1855 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1856 gimple call = cs->call_stmt;
1857 int n, arg_num = gimple_call_num_args (call);
1858
1859 if (arg_num == 0 || args->jump_functions)
1860 return;
1861 vec_safe_grow_cleared (args->jump_functions, arg_num);
1862
1863 if (gimple_call_internal_p (call))
1864 return;
1865 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1866 return;
1867
1868 for (n = 0; n < arg_num; n++)
1869 {
1870 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1871 tree arg = gimple_call_arg (call, n);
1872 tree param_type = ipa_get_callee_param_type (cs, n);
1873
1874 if (is_gimple_ip_invariant (arg))
1875 ipa_set_jf_constant (jfunc, arg, cs);
1876 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1877 && TREE_CODE (arg) == PARM_DECL)
1878 {
1879 int index = ipa_get_param_decl_index (info, arg);
1880
1881 gcc_assert (index >= 0);
1882 /* Aggregate passed by value, check for pass-through, otherwise we
1883 will attempt to fill in aggregate contents later in this
1884 for cycle. */
1885 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1886 {
1887 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1888 continue;
1889 }
1890 }
1891 else if (TREE_CODE (arg) == SSA_NAME)
1892 {
1893 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1894 {
1895 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1896 if (index >= 0)
1897 {
1898 bool agg_p, type_p;
1899 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1900 if (param_type && POINTER_TYPE_P (param_type))
1901 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1902 call, jfunc);
1903 else
1904 type_p = false;
1905 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1906 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1907 type_p);
1908 }
1909 }
1910 else
1911 {
1912 gimple stmt = SSA_NAME_DEF_STMT (arg);
1913 if (is_gimple_assign (stmt))
1914 compute_complex_assign_jump_func (fbi, info, jfunc,
1915 call, stmt, arg, param_type);
1916 else if (gimple_code (stmt) == GIMPLE_PHI)
1917 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1918 call, stmt, param_type);
1919 }
1920 }
1921 else
1922 compute_known_type_jump_func (arg, jfunc, call,
1923 param_type
1924 && POINTER_TYPE_P (param_type)
1925 ? TREE_TYPE (param_type)
1926 : NULL);
1927
1928 /* If ARG is a pointer, we cannot use its type to determine the type of the
1929 aggregate passed (because type conversions are ignored in gimple). Usually
1930 we can safely get the type from the function declaration, but in the case
1931 of K&R prototypes or variadic functions we can try our luck with the type
1932 of the pointer passed. TODO: Since we look for actual initialization of
1933 the memory object, we may do better working out the type from the memory stores we find. */
1934 if (!param_type)
1935 param_type = TREE_TYPE (arg);
1936
1937 if ((jfunc->type != IPA_JF_PASS_THROUGH
1938 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1939 && (jfunc->type != IPA_JF_ANCESTOR
1940 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1941 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1942 || POINTER_TYPE_P (param_type)))
1943 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1944 }
1945 }
1946
1947 /* Compute jump functions for all edges - both direct and indirect - outgoing
1948 from BB. */
1949
1950 static void
1951 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1952 {
1953 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1954 int i;
1955 struct cgraph_edge *cs;
1956
1957 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1958 {
1959 struct cgraph_node *callee = cs->callee;
1960
1961 if (callee)
1962 {
1963 cgraph_function_or_thunk_node (callee, NULL);
1964 /* We do not need to bother analyzing calls to unknown functions
1965 unless they may become known during lto/whopr. */
1966 if (!callee->definition && !flag_lto)
1967 continue;
1968 }
1969 ipa_compute_jump_functions_for_edge (fbi, cs);
1970 }
1971 }
1972
1973 /* If STMT looks like a statement loading a value from a member pointer formal
1974 parameter, return that parameter and store the offset of the field to
1975 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1976 might be clobbered). If USE_DELTA, then we look for a use of the delta
1977 field rather than the pfn. */
1978
1979 static tree
1980 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1981 HOST_WIDE_INT *offset_p)
1982 {
1983 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1984
1985 if (!gimple_assign_single_p (stmt))
1986 return NULL_TREE;
1987
1988 rhs = gimple_assign_rhs1 (stmt);
1989 if (TREE_CODE (rhs) == COMPONENT_REF)
1990 {
1991 ref_field = TREE_OPERAND (rhs, 1);
1992 rhs = TREE_OPERAND (rhs, 0);
1993 }
1994 else
1995 ref_field = NULL_TREE;
1996 if (TREE_CODE (rhs) != MEM_REF)
1997 return NULL_TREE;
1998 rec = TREE_OPERAND (rhs, 0);
1999 if (TREE_CODE (rec) != ADDR_EXPR)
2000 return NULL_TREE;
2001 rec = TREE_OPERAND (rec, 0);
2002 if (TREE_CODE (rec) != PARM_DECL
2003 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2004 return NULL_TREE;
2005 ref_offset = TREE_OPERAND (rhs, 1);
2006
2007 if (use_delta)
2008 fld = delta_field;
2009 else
2010 fld = ptr_field;
2011 if (offset_p)
2012 *offset_p = int_bit_position (fld);
2013
2014 if (ref_field)
2015 {
2016 if (integer_nonzerop (ref_offset))
2017 return NULL_TREE;
2018 return ref_field == fld ? rec : NULL_TREE;
2019 }
2020 else
2021 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2022 : NULL_TREE;
2023 }
2024
2025 /* Returns true iff T is an SSA_NAME defined by a statement. */
2026
2027 static bool
2028 ipa_is_ssa_with_stmt_def (tree t)
2029 {
2030 if (TREE_CODE (t) == SSA_NAME
2031 && !SSA_NAME_IS_DEFAULT_DEF (t))
2032 return true;
2033 else
2034 return false;
2035 }
2036
2037 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2038 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2039 indirect call graph edge. */
2040
2041 static struct cgraph_edge *
2042 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2043 {
2044 struct cgraph_edge *cs;
2045
2046 cs = cgraph_edge (node, stmt);
2047 cs->indirect_info->param_index = param_index;
2048 cs->indirect_info->agg_contents = 0;
2049 cs->indirect_info->member_ptr = 0;
2050 return cs;
2051 }
2052
2053 /* Analyze the CALL and examine uses of formal parameters of the caller
2054 FBI->node (described by FBI->info), using intermediate information about
2055 each formal parameter stored in FBI. Currently it checks
2056 whether the call calls a pointer that is a formal parameter and if so, the
2057 parameter is marked with the called flag and an indirect call graph edge
2058 describing the call is created. This is very simple for ordinary pointers
2059 represented in SSA but not-so-nice when it comes to member pointers. The
2060 ugly part of this function does nothing more than trying to match the
2061 pattern of such a call. An example of such a pattern is the gimple dump
2062 below, the call is on the last line:
2063
2064 <bb 2>:
2065 f$__delta_5 = f.__delta;
2066 f$__pfn_24 = f.__pfn;
2067
2068 or
2069 <bb 2>:
2070 f$__delta_5 = MEM[(struct *)&f];
2071 f$__pfn_24 = MEM[(struct *)&f + 4B];
2072
2073 and a few lines below:
2074
2075 <bb 5>
2076 D.2496_3 = (int) f$__pfn_24;
2077 D.2497_4 = D.2496_3 & 1;
2078 if (D.2497_4 != 0)
2079 goto <bb 3>;
2080 else
2081 goto <bb 4>;
2082
2083 <bb 6>:
2084 D.2500_7 = (unsigned int) f$__delta_5;
2085 D.2501_8 = &S + D.2500_7;
2086 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2087 D.2503_10 = *D.2502_9;
2088 D.2504_12 = f$__pfn_24 + -1;
2089 D.2505_13 = (unsigned int) D.2504_12;
2090 D.2506_14 = D.2503_10 + D.2505_13;
2091 D.2507_15 = *D.2506_14;
2092 iftmp.11_16 = (String:: *) D.2507_15;
2093
2094 <bb 7>:
2095 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2096 D.2500_19 = (unsigned int) f$__delta_5;
2097 D.2508_20 = &S + D.2500_19;
2098 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2099
2100 Such patterns are results of simple calls to a member pointer:
2101
2102 int doprinting (int (MyString::* f)(int) const)
2103 {
2104 MyString S ("somestring");
2105
2106 return (S.*f)(4);
2107 }
2108
2109 The function also looks for called pointers loaded from aggregates
2110 passed by value or by reference. */
2111
2112 static void
2113 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2114 tree target)
2115 {
2116 struct ipa_node_params *info = fbi->info;
2117 HOST_WIDE_INT offset;
2118 bool by_ref;
2119
2120 if (SSA_NAME_IS_DEFAULT_DEF (target))
2121 {
2122 tree var = SSA_NAME_VAR (target);
2123 int index = ipa_get_param_decl_index (info, var);
2124 if (index >= 0)
2125 ipa_note_param_call (fbi->node, index, call);
2126 return;
2127 }
2128
2129 int index;
2130 gimple def = SSA_NAME_DEF_STMT (target);
2131 if (gimple_assign_single_p (def)
2132 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2133 gimple_assign_rhs1 (def), &index, &offset,
2134 NULL, &by_ref))
2135 {
2136 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2137 if (cs->indirect_info->offset != offset)
2138 cs->indirect_info->outer_type = NULL;
2139 cs->indirect_info->offset = offset;
2140 cs->indirect_info->agg_contents = 1;
2141 cs->indirect_info->by_ref = by_ref;
2142 return;
2143 }
2144
2145 /* Now we need to try to match the complex pattern of calling a member
2146 pointer. */
2147 if (gimple_code (def) != GIMPLE_PHI
2148 || gimple_phi_num_args (def) != 2
2149 || !POINTER_TYPE_P (TREE_TYPE (target))
2150 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2151 return;
2152
2153 /* First, we need to check whether one of these is a load from a member
2154 pointer that is a parameter to this function. */
2155 tree n1 = PHI_ARG_DEF (def, 0);
2156 tree n2 = PHI_ARG_DEF (def, 1);
2157 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2158 return;
2159 gimple d1 = SSA_NAME_DEF_STMT (n1);
2160 gimple d2 = SSA_NAME_DEF_STMT (n2);
2161
2162 tree rec;
2163 basic_block bb, virt_bb;
2164 basic_block join = gimple_bb (def);
2165 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2166 {
2167 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2168 return;
2169
2170 bb = EDGE_PRED (join, 0)->src;
2171 virt_bb = gimple_bb (d2);
2172 }
2173 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2174 {
2175 bb = EDGE_PRED (join, 1)->src;
2176 virt_bb = gimple_bb (d1);
2177 }
2178 else
2179 return;
2180
2181 /* Second, we need to check that the basic blocks are laid out in the way
2182 corresponding to the pattern. */
2183
2184 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2185 || single_pred (virt_bb) != bb
2186 || single_succ (virt_bb) != join)
2187 return;
2188
2189 /* Third, let's see that the branching is done depending on the least
2190 significant bit of the pfn. */
2191
2192 gimple branch = last_stmt (bb);
2193 if (!branch || gimple_code (branch) != GIMPLE_COND)
2194 return;
2195
2196 if ((gimple_cond_code (branch) != NE_EXPR
2197 && gimple_cond_code (branch) != EQ_EXPR)
2198 || !integer_zerop (gimple_cond_rhs (branch)))
2199 return;
2200
2201 tree cond = gimple_cond_lhs (branch);
2202 if (!ipa_is_ssa_with_stmt_def (cond))
2203 return;
2204
2205 def = SSA_NAME_DEF_STMT (cond);
2206 if (!is_gimple_assign (def)
2207 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2208 || !integer_onep (gimple_assign_rhs2 (def)))
2209 return;
2210
2211 cond = gimple_assign_rhs1 (def);
2212 if (!ipa_is_ssa_with_stmt_def (cond))
2213 return;
2214
2215 def = SSA_NAME_DEF_STMT (cond);
2216
2217 if (is_gimple_assign (def)
2218 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2219 {
2220 cond = gimple_assign_rhs1 (def);
2221 if (!ipa_is_ssa_with_stmt_def (cond))
2222 return;
2223 def = SSA_NAME_DEF_STMT (cond);
2224 }
2225
2226 tree rec2;
2227 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2228 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2229 == ptrmemfunc_vbit_in_delta),
2230 NULL);
2231 if (rec != rec2)
2232 return;
2233
2234 index = ipa_get_param_decl_index (info, rec);
2235 if (index >= 0
2236 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2237 {
2238 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2239 if (cs->indirect_info->offset != offset)
2240 cs->indirect_info->outer_type = NULL;
2241 cs->indirect_info->offset = offset;
2242 cs->indirect_info->agg_contents = 1;
2243 cs->indirect_info->member_ptr = 1;
2244 }
2245
2246 return;
2247 }
2248
2249 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2250 object referenced in the expression is a formal parameter of the caller
2251 FBI->node (described by FBI->info), create a call note for the
2252 statement. */
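/* As a made-up example, in a caller such as

     void foo (B *b)
     {
       b->virt_meth ();
     }

   where virt_meth is a virtual method, the OBJ_TYPE_REF object is the
   default definition of parameter b, so a polymorphic indirect call note
   referring to that parameter is created. */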
2253
2254 static void
2255 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2256 gimple call, tree target)
2257 {
2258 tree obj = OBJ_TYPE_REF_OBJECT (target);
2259 int index;
2260 HOST_WIDE_INT anc_offset;
2261
2262 if (!flag_devirtualize)
2263 return;
2264
2265 if (TREE_CODE (obj) != SSA_NAME)
2266 return;
2267
2268 struct ipa_node_params *info = fbi->info;
2269 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2270 {
2271 struct ipa_jump_func jfunc;
2272 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2273 return;
2274
2275 anc_offset = 0;
2276 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2277 gcc_assert (index >= 0);
2278 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2279 call, &jfunc))
2280 return;
2281 }
2282 else
2283 {
2284 struct ipa_jump_func jfunc;
2285 gimple stmt = SSA_NAME_DEF_STMT (obj);
2286 tree expr;
2287
2288 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2289 if (!expr)
2290 return;
2291 index = ipa_get_param_decl_index (info,
2292 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2293 gcc_assert (index >= 0);
2294 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2295 call, &jfunc, anc_offset))
2296 return;
2297 }
2298
2299 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2300 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2301 ii->offset = anc_offset;
2302 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2303 ii->otr_type = obj_type_ref_class (target);
2304 ii->polymorphic = 1;
2305 }
2306
2307 /* Analyze whether and how the call statement CALL utilizes formal parameters
2308 of the caller (described by FBI->info). FBI holds intermediate information
2309 about each formal parameter. */
2310
2311 static void
2312 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2313 {
2314 tree target = gimple_call_fn (call);
2315
2316 if (!target
2317 || (TREE_CODE (target) != SSA_NAME
2318 && !virtual_method_call_p (target)))
2319 return;
2320
2321 /* If we previously turned the call into a direct call, there is
2322 no need to analyze it. */
2323 struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
2324 if (cs && !cs->indirect_unknown_callee)
2325 return;
2326 if (TREE_CODE (target) == SSA_NAME)
2327 ipa_analyze_indirect_call_uses (fbi, call, target);
2328 else if (virtual_method_call_p (target))
2329 ipa_analyze_virtual_call_uses (fbi, call, target);
2330 }
2331
2332
2333 /* Analyze the call statement STMT with respect to formal parameters (described
2334 in FBI->info) of the caller given by FBI->node. Currently it only checks
2335 whether formal parameters are called. */
2336
2337 static void
2338 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2339 {
2340 if (is_gimple_call (stmt))
2341 ipa_analyze_call_uses (fbi, stmt);
2342 }
2343
2344 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2345 address-taken operands alike. If OP is based on a parameter declaration,
2346 mark the parameter as used in the info structure passed in DATA. */
2347
2348 static bool
2349 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2350 {
2351 struct ipa_node_params *info = (struct ipa_node_params *) data;
2352
2353 op = get_base_address (op);
2354 if (op
2355 && TREE_CODE (op) == PARM_DECL)
2356 {
2357 int index = ipa_get_param_decl_index (info, op);
2358 gcc_assert (index >= 0);
2359 ipa_set_param_used (info, index, true);
2360 }
2361
2362 return false;
2363 }
2364
2365 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2366 the findings in various structures of the associated ipa_node_params
2367 structure, such as parameter flags, notes etc. FBI holds various data about
2368 the function being analyzed. */
2369
2370 static void
2371 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2372 {
2373 gimple_stmt_iterator gsi;
2374 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2375 {
2376 gimple stmt = gsi_stmt (gsi);
2377
2378 if (is_gimple_debug (stmt))
2379 continue;
2380
2381 ipa_analyze_stmt_uses (fbi, stmt);
2382 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2383 visit_ref_for_mod_analysis,
2384 visit_ref_for_mod_analysis,
2385 visit_ref_for_mod_analysis);
2386 }
2387 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2388 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2389 visit_ref_for_mod_analysis,
2390 visit_ref_for_mod_analysis,
2391 visit_ref_for_mod_analysis);
2392 }
2393
2394 /* Calculate controlled uses of parameters of NODE. A use is controlled when
it appears in a call statement and is therefore described by a jump function;
any other non-debug use forces the count to IPA_UNDESCRIBED_USE. */
2395
2396 static void
2397 ipa_analyze_controlled_uses (struct cgraph_node *node)
2398 {
2399 struct ipa_node_params *info = IPA_NODE_REF (node);
2400
2401 for (int i = 0; i < ipa_get_param_count (info); i++)
2402 {
2403 tree parm = ipa_get_param (info, i);
2404 int controlled_uses = 0;
2405
2406 /* For SSA regs see if parameter is used. For non-SSA we compute
2407 the flag during modification analysis. */
2408 if (is_gimple_reg (parm))
2409 {
2410 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2411 parm);
2412 if (ddef && !has_zero_uses (ddef))
2413 {
2414 imm_use_iterator imm_iter;
2415 use_operand_p use_p;
2416
2417 ipa_set_param_used (info, i, true);
2418 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2419 if (!is_gimple_call (USE_STMT (use_p)))
2420 {
2421 if (!is_gimple_debug (USE_STMT (use_p)))
2422 {
2423 controlled_uses = IPA_UNDESCRIBED_USE;
2424 break;
2425 }
2426 }
2427 else
2428 controlled_uses++;
2429 }
2430 else
2431 controlled_uses = 0;
2432 }
2433 else
2434 controlled_uses = IPA_UNDESCRIBED_USE;
2435 ipa_set_controlled_uses (info, i, controlled_uses);
2436 }
2437 }
2438
2439 /* Free stuff in BI. */
2440
2441 static void
2442 free_ipa_bb_info (struct ipa_bb_info *bi)
2443 {
2444 bi->cg_edges.release ();
2445 bi->param_aa_statuses.release ();
2446 }
2447
2448 /* Dominator walker driving the analysis. */
2449
2450 class analysis_dom_walker : public dom_walker
2451 {
2452 public:
2453 analysis_dom_walker (struct func_body_info *fbi)
2454 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2455
2456 virtual void before_dom_children (basic_block);
2457
2458 private:
2459 struct func_body_info *m_fbi;
2460 };
2461
2462 void
2463 analysis_dom_walker::before_dom_children (basic_block bb)
2464 {
2465 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2466 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2467 }
2468
2469 /* Initialize the array describing properties of formal parameters
2470 of NODE, analyze their uses and compute jump functions associated
2471 with actual arguments of calls from within NODE. */
2472
2473 void
2474 ipa_analyze_node (struct cgraph_node *node)
2475 {
2476 struct func_body_info fbi;
2477 struct ipa_node_params *info;
2478
2479 ipa_check_create_node_params ();
2480 ipa_check_create_edge_args ();
2481 info = IPA_NODE_REF (node);
2482
2483 if (info->analysis_done)
2484 return;
2485 info->analysis_done = 1;
2486
2487 if (ipa_func_spec_opts_forbid_analysis_p (node))
2488 {
2489 for (int i = 0; i < ipa_get_param_count (info); i++)
2490 {
2491 ipa_set_param_used (info, i, true);
2492 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2493 }
2494 return;
2495 }
2496
2497 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2498 push_cfun (func);
2499 calculate_dominance_info (CDI_DOMINATORS);
2500 ipa_initialize_node_params (node);
2501 ipa_analyze_controlled_uses (node);
2502
2503 fbi.node = node;
2504 fbi.info = IPA_NODE_REF (node);
2505 fbi.bb_infos = vNULL;
2506 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2507 fbi.param_count = ipa_get_param_count (info);
2508 fbi.aa_walked = 0;
2509
2510 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2511 {
2512 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2513 bi->cg_edges.safe_push (cs);
2514 }
2515
2516 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2517 {
2518 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2519 bi->cg_edges.safe_push (cs);
2520 }
2521
2522 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2523
2524 int i;
2525 struct ipa_bb_info *bi;
2526 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2527 free_ipa_bb_info (bi);
2528 fbi.bb_infos.release ();
2529 free_dominance_info (CDI_DOMINATORS);
2530 pop_cfun ();
2531 }
2532
2533 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2534 attempt a type-based devirtualization. If successful, return the
2535 target function declaration, otherwise return NULL. */
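/* A sketch of a situation this targets (types and names are hypothetical):
   the call goes through an OBJ_TYPE_REF whose object can be tracked to the
   address of a local variable, e.g.

     struct A { virtual void f (); };
     ... (&a)->f (); ...      // where a is a local of type A

   in which case the known type jump function pins the dynamic type down to
   A and the BINFO lookup can yield A::f. */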
2536
2537 tree
2538 ipa_intraprocedural_devirtualization (gimple call)
2539 {
2540 tree binfo, token, fndecl;
2541 struct ipa_jump_func jfunc;
2542 tree otr = gimple_call_fn (call);
2543
2544 jfunc.type = IPA_JF_UNKNOWN;
2545 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2546 call, obj_type_ref_class (otr));
2547 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2548 return NULL_TREE;
2549 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2550 if (!binfo)
2551 return NULL_TREE;
2552 token = OBJ_TYPE_REF_TOKEN (otr);
2553 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2554 binfo);
2555 #ifdef ENABLE_CHECKING
2556 if (fndecl)
2557 gcc_assert (possible_polymorphic_call_target_p
2558 (otr, cgraph_get_node (fndecl)));
2559 #endif
2560 return fndecl;
2561 }
2562
2563 /* Update the jump function DST when the call graph edge corresponding to SRC
2564 is being inlined, knowing that DST is of type ancestor and SRC of known
2565 type. */
2566
2567 static void
2568 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2569 struct ipa_jump_func *dst)
2570 {
2571 HOST_WIDE_INT combined_offset;
2572 tree combined_type;
2573
2574 if (!ipa_get_jf_ancestor_type_preserved (dst))
2575 {
2576 dst->type = IPA_JF_UNKNOWN;
2577 return;
2578 }
2579
2580 combined_offset = ipa_get_jf_known_type_offset (src)
2581 + ipa_get_jf_ancestor_offset (dst);
2582 combined_type = ipa_get_jf_ancestor_type (dst);
2583
2584 ipa_set_jf_known_type (dst, combined_offset,
2585 ipa_get_jf_known_type_base_type (src),
2586 combined_type);
2587 }
2588
2589 /* Update the jump functions associated with call graph edge E when the call
2590 graph edge CS is being inlined, assuming that E->caller is already (possibly
2591 indirectly) inlined into CS->callee and that E has not been inlined. */
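/* For instance, if DST describes an argument that was a simple pass-through
   of a formal parameter of E->caller, and the jump function that CS computed
   for that formal is itself a pass-through of a parameter of CS->caller,
   DST is redirected to refer to that parameter of CS->caller; combinations
   that cannot be expressed degrade to IPA_JF_UNKNOWN. */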
2592
2593 static void
2594 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2595 struct cgraph_edge *e)
2596 {
2597 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2598 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2599 int count = ipa_get_cs_argument_count (args);
2600 int i;
2601
2602 for (i = 0; i < count; i++)
2603 {
2604 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2605
2606 if (dst->type == IPA_JF_ANCESTOR)
2607 {
2608 struct ipa_jump_func *src;
2609 int dst_fid = dst->value.ancestor.formal_id;
2610
2611 /* Variable number of arguments can cause havoc if we try to access
2612 one that does not exist in the inlined edge. So make sure we
2613 don't. */
2614 if (dst_fid >= ipa_get_cs_argument_count (top))
2615 {
2616 dst->type = IPA_JF_UNKNOWN;
2617 continue;
2618 }
2619
2620 src = ipa_get_ith_jump_func (top, dst_fid);
2621
2622 if (src->agg.items
2623 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2624 {
2625 struct ipa_agg_jf_item *item;
2626 int j;
2627
2628 /* Currently we do not produce clobber aggregate jump functions,
2629 replace with merging when we do. */
2630 gcc_assert (!dst->agg.items);
2631
2632 dst->agg.items = vec_safe_copy (src->agg.items);
2633 dst->agg.by_ref = src->agg.by_ref;
2634 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2635 item->offset -= dst->value.ancestor.offset;
2636 }
2637
2638 if (src->type == IPA_JF_KNOWN_TYPE)
2639 combine_known_type_and_ancestor_jfs (src, dst);
2640 else if (src->type == IPA_JF_PASS_THROUGH
2641 && src->value.pass_through.operation == NOP_EXPR)
2642 {
2643 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2644 dst->value.ancestor.agg_preserved &=
2645 src->value.pass_through.agg_preserved;
2646 dst->value.ancestor.type_preserved &=
2647 src->value.pass_through.type_preserved;
2648 }
2649 else if (src->type == IPA_JF_ANCESTOR)
2650 {
2651 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2652 dst->value.ancestor.offset += src->value.ancestor.offset;
2653 dst->value.ancestor.agg_preserved &=
2654 src->value.ancestor.agg_preserved;
2655 dst->value.ancestor.type_preserved &=
2656 src->value.ancestor.type_preserved;
2657 }
2658 else
2659 dst->type = IPA_JF_UNKNOWN;
2660 }
2661 else if (dst->type == IPA_JF_PASS_THROUGH)
2662 {
2663 struct ipa_jump_func *src;
2664 /* We must check range due to calls with variable number of arguments
2665 and we cannot combine jump functions with operations. */
2666 if (dst->value.pass_through.operation == NOP_EXPR
2667 && (dst->value.pass_through.formal_id
2668 < ipa_get_cs_argument_count (top)))
2669 {
2670 int dst_fid = dst->value.pass_through.formal_id;
2671 src = ipa_get_ith_jump_func (top, dst_fid);
2672 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2673
2674 switch (src->type)
2675 {
2676 case IPA_JF_UNKNOWN:
2677 dst->type = IPA_JF_UNKNOWN;
2678 break;
2679 case IPA_JF_KNOWN_TYPE:
2680 if (ipa_get_jf_pass_through_type_preserved (dst))
2681 ipa_set_jf_known_type (dst,
2682 ipa_get_jf_known_type_offset (src),
2683 ipa_get_jf_known_type_base_type (src),
2684 ipa_get_jf_known_type_component_type (src));
2685 else
2686 dst->type = IPA_JF_UNKNOWN;
2687 break;
2688 case IPA_JF_CONST:
2689 ipa_set_jf_cst_copy (dst, src);
2690 break;
2691
2692 case IPA_JF_PASS_THROUGH:
2693 {
2694 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2695 enum tree_code operation;
2696 operation = ipa_get_jf_pass_through_operation (src);
2697
2698 if (operation == NOP_EXPR)
2699 {
2700 bool agg_p, type_p;
2701 agg_p = dst_agg_p
2702 && ipa_get_jf_pass_through_agg_preserved (src);
2703 type_p = ipa_get_jf_pass_through_type_preserved (src)
2704 && ipa_get_jf_pass_through_type_preserved (dst);
2705 ipa_set_jf_simple_pass_through (dst, formal_id,
2706 agg_p, type_p);
2707 }
2708 else
2709 {
2710 tree operand = ipa_get_jf_pass_through_operand (src);
2711 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2712 operation);
2713 }
2714 break;
2715 }
2716 case IPA_JF_ANCESTOR:
2717 {
2718 bool agg_p, type_p;
2719 agg_p = dst_agg_p
2720 && ipa_get_jf_ancestor_agg_preserved (src);
2721 type_p = ipa_get_jf_ancestor_type_preserved (src)
2722 && ipa_get_jf_pass_through_type_preserved (dst);
2723 ipa_set_ancestor_jf (dst,
2724 ipa_get_jf_ancestor_offset (src),
2725 ipa_get_jf_ancestor_type (src),
2726 ipa_get_jf_ancestor_formal_id (src),
2727 agg_p, type_p);
2728 break;
2729 }
2730 default:
2731 gcc_unreachable ();
2732 }
2733
2734 if (src->agg.items
2735 && (dst_agg_p || !src->agg.by_ref))
2736 {
2737 /* Currently we do not produce clobber aggregate jump
2738 functions, replace with merging when we do. */
2739 gcc_assert (!dst->agg.items);
2740
2741 dst->agg.by_ref = src->agg.by_ref;
2742 dst->agg.items = vec_safe_copy (src->agg.items);
2743 }
2744 }
2745 else
2746 dst->type = IPA_JF_UNKNOWN;
2747 }
2748 }
2749 }
2750
2751 /* If TARGET is an addr_expr of a function declaration, make it the destination
2752 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2753
2754 struct cgraph_edge *
2755 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2756 {
2757 struct cgraph_node *callee;
2758 struct inline_edge_summary *es = inline_edge_summary (ie);
2759 bool unreachable = false;
2760
2761 if (TREE_CODE (target) == ADDR_EXPR)
2762 target = TREE_OPERAND (target, 0);
2763 if (TREE_CODE (target) != FUNCTION_DECL)
2764 {
2765 target = canonicalize_constructor_val (target, NULL);
2766 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2767 {
2768 if (ie->indirect_info->member_ptr)
2769 /* Member pointer call that goes through a VMT lookup. */
2770 return NULL;
2771
2772 if (dump_enabled_p ())
2773 {
2774 location_t loc = gimple_location_safe (ie->call_stmt);
2775 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2776 "discovered direct call to non-function in %s/%i, "
2777 "making it __builtin_unreachable\n",
2778 ie->caller->name (), ie->caller->order);
2779 }
2780
2781 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2782 callee = cgraph_get_create_node (target);
2783 unreachable = true;
2784 }
2785 else
2786 callee = cgraph_get_node (target);
2787 }
2788 else
2789 callee = cgraph_get_node (target);
2790
2791 /* Because may-edges are not explicitly represented and the vtable may be
2792 external, we may create the first reference to the object in the unit. */
2793 if (!callee || callee->global.inlined_to)
2794 {
2795
2796 /* We had better make sure we can refer to it.
2797 In the case of static functions we are out of luck, since we have already
2798 removed the body. In the case of public functions we may or may
2799 not introduce the reference. */
2800 if (!canonicalize_constructor_val (target, NULL)
2801 || !TREE_PUBLIC (target))
2802 {
2803 if (dump_file)
2804 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2805 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2806 xstrdup (ie->caller->name ()),
2807 ie->caller->order,
2808 xstrdup (ie->callee->name ()),
2809 ie->callee->order);
2810 return NULL;
2811 }
2812 callee = cgraph_get_create_node (target);
2813 }
2814
2815 if (!dbg_cnt (devirt))
2816 return NULL;
2817
2818 ipa_check_create_node_params ();
2819
2820 /* We cannot make edges to inline clones. It is a bug if someone removed
2821 the cgraph node too early. */
2822 gcc_assert (!callee->global.inlined_to);
2823
2824 if (dump_file && !unreachable)
2825 {
2826 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2827 "(%s/%i -> %s/%i), for stmt ",
2828 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2829 xstrdup (ie->caller->name ()),
2830 ie->caller->order,
2831 xstrdup (callee->name ()),
2832 callee->order);
2833 if (ie->call_stmt)
2834 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2835 else
2836 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2837 }
2838 if (dump_enabled_p ())
2839 {
2840 location_t loc = gimple_location_safe (ie->call_stmt);
2841
2842 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2843 "converting indirect call in %s to direct call to %s\n",
2844 ie->caller->name (), callee->name ());
2845 }
2846 ie = cgraph_make_edge_direct (ie, callee);
2847 es = inline_edge_summary (ie);
2848 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2849 - eni_size_weights.call_cost);
2850 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2851 - eni_time_weights.call_cost);
2852
2853 return ie;
2854 }
2855
2856 /* Retrieve a value from aggregate jump function AGG for the given OFFSET, or
2857 return NULL if there is none. BY_REF specifies whether the value has to
2858 be passed by reference or by value. */
2859
2860 tree
2861 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2862 HOST_WIDE_INT offset, bool by_ref)
2863 {
2864 struct ipa_agg_jf_item *item;
2865 int i;
2866
2867 if (by_ref != agg->by_ref)
2868 return NULL;
2869
2870 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2871 if (item->offset == offset)
2872 {
2873 /* Currently we do not have clobber values, return NULL for them once
2874 we do. */
2875 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2876 return item->value;
2877 }
2878 return NULL;
2879 }
2880
2881 /* Remove a reference to SYMBOL from the list of references of a node given by
2882 reference description RDESC. Return true if the reference has been
2883 successfully found and removed. */
2884
2885 static bool
2886 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2887 {
2888 struct ipa_ref *to_del;
2889 struct cgraph_edge *origin;
2890
2891 origin = rdesc->cs;
2892 if (!origin)
2893 return false;
2894 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2895 origin->lto_stmt_uid);
2896 if (!to_del)
2897 return false;
2898
2899 to_del->remove_reference ();
2900 if (dump_file)
2901 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2902 xstrdup (origin->caller->name ()),
2903 origin->caller->order, xstrdup (symbol->name ()));
2904 return true;
2905 }
2906
2907 /* If JFUNC has a reference description with refcount different from
2908 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2909 NULL. JFUNC must be a constant jump function. */
2910
2911 static struct ipa_cst_ref_desc *
2912 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2913 {
2914 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2915 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2916 return rdesc;
2917 else
2918 return NULL;
2919 }
2920
2921 /* If the value of constant jump function JFUNC is an address of a function
2922 declaration, return the associated call graph node. Otherwise return
2923 NULL. */
2924
2925 static cgraph_node *
2926 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2927 {
2928 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2929 tree cst = ipa_get_jf_constant (jfunc);
2930 if (TREE_CODE (cst) != ADDR_EXPR
2931 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2932 return NULL;
2933
2934 return cgraph_get_node (TREE_OPERAND (cst, 0));
2935 }
2936
2937
2938 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2939 refcount and, if it hits zero, remove the reference to the symbol the constant
2940 refers to from the caller of the edge specified in the rdesc. Return false
2941 if either the symbol or the reference could not be found, otherwise return true. */
2942
2943 static bool
2944 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2945 {
2946 struct ipa_cst_ref_desc *rdesc;
2947 if (jfunc->type == IPA_JF_CONST
2948 && (rdesc = jfunc_rdesc_usable (jfunc))
2949 && --rdesc->refcount == 0)
2950 {
2951 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2952 if (!symbol)
2953 return false;
2954
2955 return remove_described_reference (symbol, rdesc);
2956 }
2957 return true;
2958 }
2959
2960 /* Try to find a destination for indirect edge IE that corresponds to a simple
2961 call or a call of a member function pointer and where the destination is a
2962 pointer formal parameter described by jump function JFUNC. If it can be
2963 determined, return the newly direct edge, otherwise return NULL.
2964 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2965
2966 static struct cgraph_edge *
2967 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2968 struct ipa_jump_func *jfunc,
2969 struct ipa_node_params *new_root_info)
2970 {
2971 struct cgraph_edge *cs;
2972 tree target;
2973 bool agg_contents = ie->indirect_info->agg_contents;
2974
2975 if (ie->indirect_info->agg_contents)
2976 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2977 ie->indirect_info->offset,
2978 ie->indirect_info->by_ref);
2979 else
2980 target = ipa_value_from_jfunc (new_root_info, jfunc);
2981 if (!target)
2982 return NULL;
2983 cs = ipa_make_edge_direct_to_target (ie, target);
2984
2985 if (cs && !agg_contents)
2986 {
2987 bool ok;
2988 gcc_checking_assert (cs->callee
2989 && (cs != ie
2990 || jfunc->type != IPA_JF_CONST
2991 || !cgraph_node_for_jfunc (jfunc)
2992 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2993 ok = try_decrement_rdesc_refcount (jfunc);
2994 gcc_checking_assert (ok);
2995 }
2996
2997 return cs;
2998 }
2999
3000 /* Return the target to be used in cases of impossible devirtualization. IE
3001 and target (the latter can be NULL) are dumped when dumping is enabled. */
3002
3003 tree
3004 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3005 {
3006 if (dump_file)
3007 {
3008 if (target)
3009 fprintf (dump_file,
3010 "Type inconsistent devirtualization: %s/%i->%s\n",
3011 ie->caller->name (), ie->caller->order,
3012 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3013 else
3014 fprintf (dump_file,
3015 "No devirtualization target in %s/%i\n",
3016 ie->caller->name (), ie->caller->order);
3017 }
3018 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3019 cgraph_get_create_node (new_target);
3020 return new_target;
3021 }
3022
3023 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3024 call based on a formal parameter which is described by jump function JFUNC
3025 and if it can be determined, make it direct and return the direct edge.
3026 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3027 are relative to. */
3028
3029 static struct cgraph_edge *
3030 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3031 struct ipa_jump_func *jfunc,
3032 struct ipa_node_params *new_root_info)
3033 {
3034 tree binfo, target;
3035
3036 if (!flag_devirtualize)
3037 return NULL;
3038
3039 /* First try to do lookup via known virtual table pointer value. */
3040 if (!ie->indirect_info->by_ref)
3041 {
3042 tree vtable;
3043 unsigned HOST_WIDE_INT offset;
3044 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3045 ie->indirect_info->offset,
3046 true);
3047 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3048 {
3049 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3050 vtable, offset);
3051 if (target)
3052 {
3053 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
3054 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
3055 || !possible_polymorphic_call_target_p
3056 (ie, cgraph_get_node (target)))
3057 target = ipa_impossible_devirt_target (ie, target);
3058 return ipa_make_edge_direct_to_target (ie, target);
3059 }
3060 }
3061 }
3062
3063 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3064
3065 if (!binfo)
3066 return NULL;
3067
3068 if (TREE_CODE (binfo) != TREE_BINFO)
3069 {
3070 ipa_polymorphic_call_context context;
3071 vec <cgraph_node *>targets;
3072 bool final;
3073
3074 if (!get_polymorphic_call_info_from_invariant
3075 (&context, binfo, ie->indirect_info->otr_type,
3076 ie->indirect_info->offset))
3077 return NULL;
3078 targets = possible_polymorphic_call_targets
3079 (ie->indirect_info->otr_type,
3080 ie->indirect_info->otr_token,
3081 context, &final);
3082 if (!final || targets.length () > 1)
3083 return NULL;
3084 if (targets.length () == 1)
3085 target = targets[0]->decl;
3086 else
3087 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3088 }
3089 else
3090 {
3091 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3092 ie->indirect_info->otr_type);
3093 if (binfo)
3094 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3095 binfo);
3096 else
3097 return NULL;
3098 }
3099
3100 if (target)
3101 {
3102 if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
3103 target = ipa_impossible_devirt_target (ie, target);
3104 return ipa_make_edge_direct_to_target (ie, target);
3105 }
3106 else
3107 return NULL;
3108 }
3109
3110 /* Update the param called notes associated with NODE when CS is being inlined,
3111 assuming NODE is (potentially indirectly) inlined into CS->callee.
3112 Moreover, if the callee is discovered to be constant, create a new cgraph
3113 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3114 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
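/* For example, if the inlined NODE contained a call through its second
   formal parameter and the jump function computed on CS for that parameter
   turns out to be the address of some function F, the corresponding
   indirect edge can be made direct to F here. */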
3115
3116 static bool
3117 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3118 struct cgraph_node *node,
3119 vec<cgraph_edge_p> *new_edges)
3120 {
3121 struct ipa_edge_args *top;
3122 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3123 struct ipa_node_params *new_root_info;
3124 bool res = false;
3125
3126 ipa_check_create_edge_args ();
3127 top = IPA_EDGE_REF (cs);
3128 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3129 ? cs->caller->global.inlined_to
3130 : cs->caller);
3131
3132 for (ie = node->indirect_calls; ie; ie = next_ie)
3133 {
3134 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3135 struct ipa_jump_func *jfunc;
3136 int param_index;
3137
3138 next_ie = ie->next_callee;
3139
3140 if (ici->param_index == -1)
3141 continue;
3142
3143 /* We must check range due to calls with variable number of arguments: */
3144 if (ici->param_index >= ipa_get_cs_argument_count (top))
3145 {
3146 ici->param_index = -1;
3147 continue;
3148 }
3149
3150 param_index = ici->param_index;
3151 jfunc = ipa_get_ith_jump_func (top, param_index);
3152
3153 if (!flag_indirect_inlining)
3154 new_direct_edge = NULL;
3155 else if (ici->polymorphic)
3156 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3157 new_root_info);
3158 else
3159 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3160 new_root_info);
3161 /* If speculation was removed, then we need to do nothing. */
3162 if (new_direct_edge && new_direct_edge != ie)
3163 {
3164 new_direct_edge->indirect_inlining_edge = 1;
3165 top = IPA_EDGE_REF (cs);
3166 res = true;
3167 }
3168 else if (new_direct_edge)
3169 {
3170 new_direct_edge->indirect_inlining_edge = 1;
3171 if (new_direct_edge->call_stmt)
3172 new_direct_edge->call_stmt_cannot_inline_p
3173 = !gimple_check_call_matching_types (
3174 new_direct_edge->call_stmt,
3175 new_direct_edge->callee->decl, false);
3176 if (new_edges)
3177 {
3178 new_edges->safe_push (new_direct_edge);
3179 res = true;
3180 }
3181 top = IPA_EDGE_REF (cs);
3182 }
3183 else if (jfunc->type == IPA_JF_PASS_THROUGH
3184 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3185 {
3186 if ((ici->agg_contents
3187 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3188 || (ici->polymorphic
3189 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3190 ici->param_index = -1;
3191 else
3192 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3193 }
3194 else if (jfunc->type == IPA_JF_ANCESTOR)
3195 {
3196 if ((ici->agg_contents
3197 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3198 || (ici->polymorphic
3199 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3200 ici->param_index = -1;
3201 else
3202 {
3203 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3204 if (ipa_get_jf_ancestor_offset (jfunc))
3205 ici->outer_type = NULL;
3206 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3207 }
3208 }
3209 else
3210 /* Either we can find a destination for this edge now or never. */
3211 ici->param_index = -1;
3212 }
3213
3214 return res;
3215 }
3216
3217 /* Recursively traverse the subtree of NODE (including NODE) made of inlined
3218 cgraph_edges when CS has been inlined and invoke
3219 update_indirect_edges_after_inlining on all nodes and
3220 update_jump_functions_after_inlining on all non-inlined edges that lead out
3221 of this subtree. Newly discovered indirect edges will be added to
3222 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3223 created. */
3224
3225 static bool
3226 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3227 struct cgraph_node *node,
3228 vec<cgraph_edge_p> *new_edges)
3229 {
3230 struct cgraph_edge *e;
3231 bool res;
3232
3233 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3234
3235 for (e = node->callees; e; e = e->next_callee)
3236 if (!e->inline_failed)
3237 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3238 else
3239 update_jump_functions_after_inlining (cs, e);
3240 for (e = node->indirect_calls; e; e = e->next_callee)
3241 update_jump_functions_after_inlining (cs, e);
3242
3243 return res;
3244 }
3245
3246 /* Combine two controlled uses counts as done during inlining: the inlined
call itself accounted for one of the uses in C, so the result is C + D - 1,
with IPA_UNDESCRIBED_USE being absorbing. */
3247
3248 static int
3249 combine_controlled_uses_counters (int c, int d)
3250 {
3251 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3252 return IPA_UNDESCRIBED_USE;
3253 else
3254 return c + d - 1;
3255 }
3256
3257 /* Propagate the number of controlled users from CS->callee to the new root
3258 of the tree of inlined nodes. */
3259
3260 static void
3261 propagate_controlled_uses (struct cgraph_edge *cs)
3262 {
3263 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3264 struct cgraph_node *new_root = cs->caller->global.inlined_to
3265 ? cs->caller->global.inlined_to : cs->caller;
3266 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3267 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3268 int count, i;
3269
3270 count = MIN (ipa_get_cs_argument_count (args),
3271 ipa_get_param_count (old_root_info));
3272 for (i = 0; i < count; i++)
3273 {
3274 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3275 struct ipa_cst_ref_desc *rdesc;
3276
3277 if (jf->type == IPA_JF_PASS_THROUGH)
3278 {
3279 int src_idx, c, d;
3280 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3281 c = ipa_get_controlled_uses (new_root_info, src_idx);
3282 d = ipa_get_controlled_uses (old_root_info, i);
3283
3284 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3285 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3286 c = combine_controlled_uses_counters (c, d);
3287 ipa_set_controlled_uses (new_root_info, src_idx, c);
3288 if (c == 0 && new_root_info->ipcp_orig_node)
3289 {
3290 struct cgraph_node *n;
3291 struct ipa_ref *ref;
3292 tree t = new_root_info->known_vals[src_idx];
3293
3294 if (t && TREE_CODE (t) == ADDR_EXPR
3295 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3296 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
3297 && (ref = new_root->find_reference (n, NULL, 0)))
3298 {
3299 if (dump_file)
3300 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3301 "reference from %s/%i to %s/%i.\n",
3302 xstrdup (new_root->name ()),
3303 new_root->order,
3304 xstrdup (n->name ()), n->order);
3305 ref->remove_reference ();
3306 }
3307 }
3308 }
3309 else if (jf->type == IPA_JF_CONST
3310 && (rdesc = jfunc_rdesc_usable (jf)))
3311 {
3312 int d = ipa_get_controlled_uses (old_root_info, i);
3313 int c = rdesc->refcount;
3314 rdesc->refcount = combine_controlled_uses_counters (c, d);
3315 if (rdesc->refcount == 0)
3316 {
3317 tree cst = ipa_get_jf_constant (jf);
3318 struct cgraph_node *n;
3319 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3320 && TREE_CODE (TREE_OPERAND (cst, 0))
3321 == FUNCTION_DECL);
3322 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3323 if (n)
3324 {
3325 struct cgraph_node *clone;
3326 bool ok;
3327 ok = remove_described_reference (n, rdesc);
3328 gcc_checking_assert (ok);
3329
3330 clone = cs->caller;
3331 while (clone->global.inlined_to
3332 && clone != rdesc->cs->caller
3333 && IPA_NODE_REF (clone)->ipcp_orig_node)
3334 {
3335 struct ipa_ref *ref;
3336 ref = clone->find_reference (n, NULL, 0);
3337 if (ref)
3338 {
3339 if (dump_file)
3340 fprintf (dump_file, "ipa-prop: Removing "
3341 "cloning-created reference "
3342 "from %s/%i to %s/%i.\n",
3343 xstrdup (clone->name ()),
3344 clone->order,
3345 xstrdup (n->name ()),
3346 n->order);
3347 ref->remove_reference ();
3348 }
3349 clone = clone->callers->caller;
3350 }
3351 }
3352 }
3353 }
3354 }
3355
3356 for (i = ipa_get_param_count (old_root_info);
3357 i < ipa_get_cs_argument_count (args);
3358 i++)
3359 {
3360 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3361
3362 if (jf->type == IPA_JF_CONST)
3363 {
3364 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3365 if (rdesc)
3366 rdesc->refcount = IPA_UNDESCRIBED_USE;
3367 }
3368 else if (jf->type == IPA_JF_PASS_THROUGH)
3369 ipa_set_controlled_uses (new_root_info,
3370 jf->value.pass_through.formal_id,
3371 IPA_UNDESCRIBED_USE);
3372 }
3373 }
3374
3375 /* Update jump functions and call note functions on inlining the call site CS.
3376 CS is expected to lead to a node already cloned by
3377 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3378 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3379 created. */
3380
3381 bool
3382 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3383 vec<cgraph_edge_p> *new_edges)
3384 {
3385 bool changed;
3386 /* Do nothing if the preparation phase has not been carried out yet
3387 (i.e. during early inlining). */
3388 if (!ipa_node_params_vector.exists ())
3389 return false;
3390 gcc_assert (ipa_edge_args_vector);
3391
3392 propagate_controlled_uses (cs);
3393 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3394
3395 return changed;
3396 }
3397
3398 /* Frees all dynamically allocated structures that the argument info points
3399 to. */
3400
3401 void
3402 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3403 {
3404 vec_free (args->jump_functions);
3405 memset (args, 0, sizeof (*args));
3406 }
3407
3408 /* Free all ipa_edge_args structures. */
3409
3410 void
3411 ipa_free_all_edge_args (void)
3412 {
3413 int i;
3414 struct ipa_edge_args *args;
3415
3416 if (!ipa_edge_args_vector)
3417 return;
3418
3419 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3420 ipa_free_edge_args_substructures (args);
3421
3422 vec_free (ipa_edge_args_vector);
3423 }
3424
3425 /* Frees all dynamically allocated structures that the param info points
3426 to. */
3427
3428 void
3429 ipa_free_node_params_substructures (struct ipa_node_params *info)
3430 {
3431 info->descriptors.release ();
3432 free (info->lattices);
3433 /* Lattice values and their sources are deallocated with their allocation
3434 pool. */
3435 info->known_vals.release ();
3436 memset (info, 0, sizeof (*info));
3437 }
3438
3439 /* Free all ipa_node_params structures. */
3440
3441 void
3442 ipa_free_all_node_params (void)
3443 {
3444 int i;
3445 struct ipa_node_params *info;
3446
3447 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3448 ipa_free_node_params_substructures (info);
3449
3450 ipa_node_params_vector.release ();
3451 }
3452
3453 /* Set the aggregate replacements of NODE to be AGGVALS. */
3454
3455 void
3456 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3457 struct ipa_agg_replacement_value *aggvals)
3458 {
3459 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3460 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3461
3462 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3463 }
3464
3465 /* Hook that is called by cgraph.c when an edge is removed. */
3466
3467 static void
3468 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3469 {
3470 struct ipa_edge_args *args;
3471
3472 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3473 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3474 return;
3475
3476 args = IPA_EDGE_REF (cs);
3477 if (args->jump_functions)
3478 {
3479 struct ipa_jump_func *jf;
3480 int i;
3481 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3482 {
3483 struct ipa_cst_ref_desc *rdesc;
3484 try_decrement_rdesc_refcount (jf);
3485 if (jf->type == IPA_JF_CONST
3486 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3487 && rdesc->cs == cs)
3488 rdesc->cs = NULL;
3489 }
3490 }
3491
3492 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3493 }
3494
3495 /* Hook that is called by cgraph.c when a node is removed. */
3496
3497 static void
3498 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3499 {
3500 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3501 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3502 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3503 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3504 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3505 }
3506
3507 /* Hook that is called by cgraph.c when an edge is duplicated. */
3508
3509 static void
3510 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3511 __attribute__((unused)) void *data)
3512 {
3513 struct ipa_edge_args *old_args, *new_args;
3514 unsigned int i;
3515
3516 ipa_check_create_edge_args ();
3517
3518 old_args = IPA_EDGE_REF (src);
3519 new_args = IPA_EDGE_REF (dst);
3520
3521 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3522
3523 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3524 {
3525 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3526 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3527
3528 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3529
3530 if (src_jf->type == IPA_JF_CONST)
3531 {
3532 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3533
3534 if (!src_rdesc)
3535 dst_jf->value.constant.rdesc = NULL;
3536 else if (src->caller == dst->caller)
3537 {
3538 struct ipa_ref *ref;
3539 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3540 gcc_checking_assert (n);
3541 ref = src->caller->find_reference (n, src->call_stmt,
3542 src->lto_stmt_uid);
3543 gcc_checking_assert (ref);
3544 dst->caller->clone_reference (ref, ref->stmt);
3545
3546 gcc_checking_assert (ipa_refdesc_pool);
3547 struct ipa_cst_ref_desc *dst_rdesc
3548 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3549 dst_rdesc->cs = dst;
3550 dst_rdesc->refcount = src_rdesc->refcount;
3551 dst_rdesc->next_duplicate = NULL;
3552 dst_jf->value.constant.rdesc = dst_rdesc;
3553 }
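/* SRC is the edge that owns the rdesc; chain a new rdesc for DST
right after it in the duplicate list. */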
3554 else if (src_rdesc->cs == src)
3555 {
3556 struct ipa_cst_ref_desc *dst_rdesc;
3557 gcc_checking_assert (ipa_refdesc_pool);
3558 dst_rdesc
3559 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3560 dst_rdesc->cs = dst;
3561 dst_rdesc->refcount = src_rdesc->refcount;
3562 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3563 src_rdesc->next_duplicate = dst_rdesc;
3564 dst_jf->value.constant.rdesc = dst_rdesc;
3565 }
3566 else
3567 {
3568 struct ipa_cst_ref_desc *dst_rdesc;
3569 /* This can happen during inlining, when a JFUNC can refer to a
3570 reference taken in a function up in the tree of inline clones.
3571 We need to find the duplicate that refers to our tree of
3572 inline clones. */
3573
3574 gcc_assert (dst->caller->global.inlined_to);
3575 for (dst_rdesc = src_rdesc->next_duplicate;
3576 dst_rdesc;
3577 dst_rdesc = dst_rdesc->next_duplicate)
3578 {
3579 struct cgraph_node *top;
3580 top = dst_rdesc->cs->caller->global.inlined_to
3581 ? dst_rdesc->cs->caller->global.inlined_to
3582 : dst_rdesc->cs->caller;
3583 if (dst->caller->global.inlined_to == top)
3584 break;
3585 }
3586 gcc_assert (dst_rdesc);
3587 dst_jf->value.constant.rdesc = dst_rdesc;
3588 }
3589 }
3590 }
3591 }
3592
3593 /* Hook that is called by cgraph.c when a node is duplicated. */
3594
3595 static void
3596 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3597 ATTRIBUTE_UNUSED void *data)
3598 {
3599 struct ipa_node_params *old_info, *new_info;
3600 struct ipa_agg_replacement_value *old_av, *new_av;
3601
3602 ipa_check_create_node_params ();
3603 old_info = IPA_NODE_REF (src);
3604 new_info = IPA_NODE_REF (dst);
3605
3606 new_info->descriptors = old_info->descriptors.copy ();
3607 new_info->lattices = NULL;
3608 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3609
3610 new_info->analysis_done = old_info->analysis_done;
3611 new_info->node_enqueued = old_info->node_enqueued;
3612
3613 old_av = ipa_get_agg_replacements_for_node (src);
3614 if (!old_av)
3615 return;
3616
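/* Duplicate the chain of aggregate replacement values. The copy ends up
in reverse order, which should be harmless since consumers look values
up by parameter index and offset rather than by list position. */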
3617 new_av = NULL;
3618 while (old_av)
3619 {
3620 struct ipa_agg_replacement_value *v;
3621
3622 v = ggc_alloc<ipa_agg_replacement_value> ();
3623 memcpy (v, old_av, sizeof (*v));
3624 v->next = new_av;
3625 new_av = v;
3626 old_av = old_av->next;
3627 }
3628 ipa_set_node_agg_value_chain (dst, new_av);
3629 }
3630
3631
3632 /* Analyze a function that has been newly added to the callgraph. */
3633
3634 static void
3635 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3636 {
3637 if (cgraph_function_with_gimple_body_p (node))
3638 ipa_analyze_node (node);
3639 }
3640
3641 /* Register our cgraph hooks if they are not already there. */
3642
3643 void
3644 ipa_register_cgraph_hooks (void)
3645 {
3646 if (!edge_removal_hook_holder)
3647 edge_removal_hook_holder =
3648 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3649 if (!node_removal_hook_holder)
3650 node_removal_hook_holder =
3651 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3652 if (!edge_duplication_hook_holder)
3653 edge_duplication_hook_holder =
3654 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3655 if (!node_duplication_hook_holder)
3656 node_duplication_hook_holder =
3657 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3658 function_insertion_hook_holder =
3659 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3660 }
3661
3662 /* Unregister all our cgraph hooks. */
3663
3664 static void
3665 ipa_unregister_cgraph_hooks (void)
3666 {
3667 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3668 edge_removal_hook_holder = NULL;
3669 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3670 node_removal_hook_holder = NULL;
3671 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3672 edge_duplication_hook_holder = NULL;
3673 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3674 node_duplication_hook_holder = NULL;
3675 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3676 function_insertion_hook_holder = NULL;
3677 }
3678
3679 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3680 longer needed after ipa-cp. */
3681
3682 void
3683 ipa_free_all_structures_after_ipa_cp (void)
3684 {
3685 if (!optimize)
3686 {
3687 ipa_free_all_edge_args ();
3688 ipa_free_all_node_params ();
3689 free_alloc_pool (ipcp_sources_pool);
3690 free_alloc_pool (ipcp_values_pool);
3691 free_alloc_pool (ipcp_agg_lattice_pool);
3692 ipa_unregister_cgraph_hooks ();
3693 if (ipa_refdesc_pool)
3694 free_alloc_pool (ipa_refdesc_pool);
3695 }
3696 }
3697
3698 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3699 longer needed after indirect inlining. */
3700
3701 void
3702 ipa_free_all_structures_after_iinln (void)
3703 {
3704 ipa_free_all_edge_args ();
3705 ipa_free_all_node_params ();
3706 ipa_unregister_cgraph_hooks ();
3707 if (ipcp_sources_pool)
3708 free_alloc_pool (ipcp_sources_pool);
3709 if (ipcp_values_pool)
3710 free_alloc_pool (ipcp_values_pool);
3711 if (ipcp_agg_lattice_pool)
3712 free_alloc_pool (ipcp_agg_lattice_pool);
3713 if (ipa_refdesc_pool)
3714 free_alloc_pool (ipa_refdesc_pool);
3715 }
3716
3717 /* Print the parameter descriptors recorded for function NODE to
3718 file F. */
3719
3720 void
3721 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3722 {
3723 int i, count;
3724 struct ipa_node_params *info;
3725
3726 if (!node->definition)
3727 return;
3728 info = IPA_NODE_REF (node);
3729 fprintf (f, " function %s/%i parameter descriptors:\n",
3730 node->name (), node->order);
3731 count = ipa_get_param_count (info);
3732 for (i = 0; i < count; i++)
3733 {
3734 int c;
3735
3736 fprintf (f, " ");
3737 ipa_dump_param (f, info, i);
3738 if (ipa_is_param_used (info, i))
3739 fprintf (f, " used");
3740 c = ipa_get_controlled_uses (info, i);
3741 if (c == IPA_UNDESCRIBED_USE)
3742 fprintf (f, " undescribed_use");
3743 else
3744 fprintf (f, " controlled_uses=%i", c);
3745 fprintf (f, "\n");
3746 }
3747 }
3748
3749 /* Print ipa_tree_map data structures of all functions in the
3750 callgraph to F. */
3751
3752 void
3753 ipa_print_all_params (FILE * f)
3754 {
3755 struct cgraph_node *node;
3756
3757 fprintf (f, "\nFunction parameters:\n");
3758 FOR_EACH_FUNCTION (node)
3759 ipa_print_node_params (f, node);
3760 }
3761
3762 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3763
3764 vec<tree>
3765 ipa_get_vector_of_formal_parms (tree fndecl)
3766 {
3767 vec<tree> args;
3768 int count;
3769 tree parm;
3770
3771 gcc_assert (!flag_wpa);
3772 count = count_formal_params (fndecl);
3773 args.create (count);
3774 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3775 args.quick_push (parm);
3776
3777 return args;
3778 }
3779
3780 /* Return a heap allocated vector containing types of formal parameters of
3781 function type FNTYPE. */
3782
3783 vec<tree>
3784 ipa_get_vector_of_formal_parm_types (tree fntype)
3785 {
3786 vec<tree> types;
3787 int count = 0;
3788 tree t;
3789
3790 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3791 count++;
3792
3793 types.create (count);
3794 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3795 types.quick_push (TREE_VALUE (t));
3796
3797 return types;
3798 }
3799
3800 /* Modify the function declaration FNDECL and its type according to the plan in
3801 ADJUSTMENTS. Also set the base field of each individual adjustment structure
3802 to the actual parameter being modified, as determined by its
3803 base_index field. */
3804
3805 void
3806 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3807 {
3808 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3809 tree orig_type = TREE_TYPE (fndecl);
3810 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3811
3812 /* The following test is an ugly hack: some functions simply don't have
3813 any arguments in their type. This is probably a bug, but well... */
3814 bool care_for_types = (old_arg_types != NULL_TREE);
3815 bool last_parm_void;
3816 vec<tree> otypes;
3817 if (care_for_types)
3818 {
3819 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3820 == void_type_node);
3821 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3822 if (last_parm_void)
3823 gcc_assert (oparms.length () + 1 == otypes.length ());
3824 else
3825 gcc_assert (oparms.length () == otypes.length ());
3826 }
3827 else
3828 {
3829 last_parm_void = false;
3830 otypes.create (0);
3831 }
3832
3833 int len = adjustments.length ();
3834 tree *link = &DECL_ARGUMENTS (fndecl);
3835 tree new_arg_types = NULL;
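/* Walk the adjustments, rebuilding DECL_ARGUMENTS through LINK while
collecting the new TYPE_ARG_TYPES list (in reverse) in parallel. */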
3836 for (int i = 0; i < len; i++)
3837 {
3838 struct ipa_parm_adjustment *adj;
3839 gcc_assert (link);
3840
3841 adj = &adjustments[i];
3842 tree parm;
3843 if (adj->op == IPA_PARM_OP_NEW)
3844 parm = NULL;
3845 else
3846 parm = oparms[adj->base_index];
3847 adj->base = parm;
3848
3849 if (adj->op == IPA_PARM_OP_COPY)
3850 {
3851 if (care_for_types)
3852 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3853 new_arg_types);
3854 *link = parm;
3855 link = &DECL_CHAIN (parm);
3856 }
3857 else if (adj->op != IPA_PARM_OP_REMOVE)
3858 {
3859 tree new_parm;
3860 tree ptype;
3861
3862 if (adj->by_ref)
3863 ptype = build_pointer_type (adj->type);
3864 else
3865 {
3866 ptype = adj->type;
3867 if (is_gimple_reg_type (ptype))
3868 {
3869 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3870 if (TYPE_ALIGN (ptype) < malign)
3871 ptype = build_aligned_type (ptype, malign);
3872 }
3873 }
3874
3875 if (care_for_types)
3876 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3877
3878 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3879 ptype);
3880 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3881 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3882 DECL_ARTIFICIAL (new_parm) = 1;
3883 DECL_ARG_TYPE (new_parm) = ptype;
3884 DECL_CONTEXT (new_parm) = fndecl;
3885 TREE_USED (new_parm) = 1;
3886 DECL_IGNORED_P (new_parm) = 1;
3887 layout_decl (new_parm, 0);
3888
3889 if (adj->op == IPA_PARM_OP_NEW)
3890 adj->base = NULL;
3891 else
3892 adj->base = parm;
3893 adj->new_decl = new_parm;
3894
3895 *link = new_parm;
3896 link = &DECL_CHAIN (new_parm);
3897 }
3898 }
3899
3900 *link = NULL_TREE;
3901
3902 tree new_reversed = NULL;
3903 if (care_for_types)
3904 {
3905 new_reversed = nreverse (new_arg_types);
3906 if (last_parm_void)
3907 {
3908 if (new_reversed)
3909 TREE_CHAIN (new_arg_types) = void_list_node;
3910 else
3911 new_reversed = void_list_node;
3912 }
3913 }
3914
3915 /* Use copy_node to preserve as much as possible from the original type
3916 (debug info, attribute lists etc.).
3917 The exception is METHOD_TYPEs, which must have a THIS argument.
3918 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3919 instead. */
3920 tree new_type = NULL;
3921 if (TREE_CODE (orig_type) != METHOD_TYPE
3922 || (adjustments[0].op == IPA_PARM_OP_COPY
3923 && adjustments[0].base_index == 0))
3924 {
3925 new_type = build_distinct_type_copy (orig_type);
3926 TYPE_ARG_TYPES (new_type) = new_reversed;
3927 }
3928 else
3929 {
3930 new_type
3931 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3932 new_reversed));
3933 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3934 DECL_VINDEX (fndecl) = NULL_TREE;
3935 }
3936
3937 /* When signature changes, we need to clear builtin info. */
3938 if (DECL_BUILT_IN (fndecl))
3939 {
3940 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3941 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3942 }
3943
3944 /* This is a new type, not a copy of an old type. Need to reassociate
3945 variants. We can handle everything except the main variant lazily. */
3946 tree t = TYPE_MAIN_VARIANT (orig_type);
3947 if (orig_type != t)
3948 {
3949 TYPE_MAIN_VARIANT (new_type) = t;
3950 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3951 TYPE_NEXT_VARIANT (t) = new_type;
3952 }
3953 else
3954 {
3955 TYPE_MAIN_VARIANT (new_type) = new_type;
3956 TYPE_NEXT_VARIANT (new_type) = NULL;
3957 }
3958
3959 TREE_TYPE (fndecl) = new_type;
3960 DECL_VIRTUAL_P (fndecl) = 0;
3961 DECL_LANG_SPECIFIC (fndecl) = NULL;
3962 otypes.release ();
3963 oparms.release ();
3964 }
3965
3966 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3967 If this is a directly recursive call, CS must be NULL. Otherwise it must
3968 contain the corresponding call graph edge. */
3969
3970 void
3971 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3972 ipa_parm_adjustment_vec adjustments)
3973 {
3974 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3975 vec<tree> vargs;
3976 vec<tree, va_gc> **debug_args = NULL;
3977 gimple new_stmt;
3978 gimple_stmt_iterator gsi, prev_gsi;
3979 tree callee_decl;
3980 int i, len;
3981
3982 len = adjustments.length ();
3983 vargs.create (len);
3984 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3985 current_node->remove_stmt_references (stmt);
3986
3987 gsi = gsi_for_stmt (stmt);
3988 prev_gsi = gsi;
3989 gsi_prev (&prev_gsi);
3990 for (i = 0; i < len; i++)
3991 {
3992 struct ipa_parm_adjustment *adj;
3993
3994 adj = &adjustments[i];
3995
3996 if (adj->op == IPA_PARM_OP_COPY)
3997 {
3998 tree arg = gimple_call_arg (stmt, adj->base_index);
3999
4000 vargs.quick_push (arg);
4001 }
4002 else if (adj->op != IPA_PARM_OP_REMOVE)
4003 {
4004 tree expr, base, off;
4005 location_t loc;
4006 unsigned int deref_align = 0;
4007 bool deref_base = false;
4008
4009 /* We create a new parameter out of the value of the old one; we can
4010 do the following kinds of transformations:
4011
4012 - A scalar passed by reference is converted to a scalar passed by
4013 value. (adj->by_ref is false and the type of the original
4014 actual argument is a pointer to a scalar).
4015
4016 - A part of an aggregate is passed instead of the whole aggregate.
4017 The part can be passed either by value or by reference, this is
4018 determined by value of adj->by_ref. Moreover, the code below
4019 handles both situations when the original aggregate is passed by
4020 value (its type is not a pointer) and when it is passed by
4021 reference (it is a pointer to an aggregate).
4022
4023 When the new argument is passed by reference (adj->by_ref is true)
4024 it must be a part of an aggregate and therefore we form it by
4025 simply taking the address of a reference inside the original
4026 aggregate. */
4027
4028 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4029 base = gimple_call_arg (stmt, adj->base_index);
4030 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4031 : EXPR_LOCATION (base);
4032
4033 if (TREE_CODE (base) != ADDR_EXPR
4034 && POINTER_TYPE_P (TREE_TYPE (base)))
4035 off = build_int_cst (adj->alias_ptr_type,
4036 adj->offset / BITS_PER_UNIT);
4037 else
4038 {
4039 HOST_WIDE_INT base_offset;
4040 tree prev_base;
4041 bool addrof;
4042
4043 if (TREE_CODE (base) == ADDR_EXPR)
4044 {
4045 base = TREE_OPERAND (base, 0);
4046 addrof = true;
4047 }
4048 else
4049 addrof = false;
4050 prev_base = base;
4051 base = get_addr_base_and_unit_offset (base, &base_offset);
4052 /* Aggregate arguments can have non-invariant addresses. */
4053 if (!base)
4054 {
4055 base = build_fold_addr_expr (prev_base);
4056 off = build_int_cst (adj->alias_ptr_type,
4057 adj->offset / BITS_PER_UNIT);
4058 }
4059 else if (TREE_CODE (base) == MEM_REF)
4060 {
4061 if (!addrof)
4062 {
4063 deref_base = true;
4064 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4065 }
4066 off = build_int_cst (adj->alias_ptr_type,
4067 base_offset
4068 + adj->offset / BITS_PER_UNIT);
4069 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4070 off);
4071 base = TREE_OPERAND (base, 0);
4072 }
4073 else
4074 {
4075 off = build_int_cst (adj->alias_ptr_type,
4076 base_offset
4077 + adj->offset / BITS_PER_UNIT);
4078 base = build_fold_addr_expr (base);
4079 }
4080 }
4081
4082 if (!adj->by_ref)
4083 {
4084 tree type = adj->type;
4085 unsigned int align;
4086 unsigned HOST_WIDE_INT misalign;
4087
4088 if (deref_base)
4089 {
4090 align = deref_align;
4091 misalign = 0;
4092 }
4093 else
4094 {
4095 get_pointer_alignment_1 (base, &align, &misalign);
4096 if (TYPE_ALIGN (type) > align)
4097 align = TYPE_ALIGN (type);
4098 }
4099 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4100 * BITS_PER_UNIT);
4101 misalign = misalign & (align - 1);
4102 if (misalign != 0)
4103 align = (misalign & -misalign);
4104 if (align < TYPE_ALIGN (type))
4105 type = build_aligned_type (type, align);
4106 base = force_gimple_operand_gsi (&gsi, base,
4107 true, NULL, true, GSI_SAME_STMT);
4108 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4109 /* If expr is not a valid gimple call argument, emit
4110 a load into a temporary. */
4111 if (is_gimple_reg_type (TREE_TYPE (expr)))
4112 {
4113 gimple tem = gimple_build_assign (NULL_TREE, expr);
4114 if (gimple_in_ssa_p (cfun))
4115 {
4116 gimple_set_vuse (tem, gimple_vuse (stmt));
4117 expr = make_ssa_name (TREE_TYPE (expr), tem);
4118 }
4119 else
4120 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4121 gimple_assign_set_lhs (tem, expr);
4122 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4123 }
4124 }
4125 else
4126 {
4127 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4128 expr = build_fold_addr_expr (expr);
4129 expr = force_gimple_operand_gsi (&gsi, expr,
4130 true, NULL, true, GSI_SAME_STMT);
4131 }
4132 vargs.quick_push (expr);
4133 }
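/* For an argument that is no longer passed unchanged, emit a debug
bind to an artificial DEBUG_EXPR_DECL associated with the callee's
original parameter, so debug info for the removed or modified
parameter is not lost in the callee. */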
4134 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4135 {
4136 unsigned int ix;
4137 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4138 gimple def_temp;
4139
4140 arg = gimple_call_arg (stmt, adj->base_index);
4141 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4142 {
4143 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4144 continue;
4145 arg = fold_convert_loc (gimple_location (stmt),
4146 TREE_TYPE (origin), arg);
4147 }
4148 if (debug_args == NULL)
4149 debug_args = decl_debug_args_insert (callee_decl);
4150 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4151 if (ddecl == origin)
4152 {
4153 ddecl = (**debug_args)[ix + 1];
4154 break;
4155 }
4156 if (ddecl == NULL)
4157 {
4158 ddecl = make_node (DEBUG_EXPR_DECL);
4159 DECL_ARTIFICIAL (ddecl) = 1;
4160 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4161 DECL_MODE (ddecl) = DECL_MODE (origin);
4162
4163 vec_safe_push (*debug_args, origin);
4164 vec_safe_push (*debug_args, ddecl);
4165 }
4166 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4167 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4168 }
4169 }
4170
4171 if (dump_file && (dump_flags & TDF_DETAILS))
4172 {
4173 fprintf (dump_file, "replacing stmt:");
4174 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4175 }
4176
4177 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4178 vargs.release ();
4179 if (gimple_call_lhs (stmt))
4180 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4181
4182 gimple_set_block (new_stmt, gimple_block (stmt));
4183 if (gimple_has_location (stmt))
4184 gimple_set_location (new_stmt, gimple_location (stmt));
4185 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4186 gimple_call_copy_flags (new_stmt, stmt);
4187 if (gimple_in_ssa_p (cfun))
4188 {
4189 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4190 if (gimple_vdef (stmt))
4191 {
4192 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4193 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4194 }
4195 }
4196
4197 if (dump_file && (dump_flags & TDF_DETAILS))
4198 {
4199 fprintf (dump_file, "with stmt:");
4200 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4201 fprintf (dump_file, "\n");
4202 }
4203 gsi_replace (&gsi, new_stmt, true);
4204 if (cs)
4205 cgraph_set_call_stmt (cs, new_stmt);
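/* Re-record references for the new call statement and for any
statements the gimplification above inserted before it. */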
4206 do
4207 {
4208 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
4209 gsi_prev (&gsi);
4210 }
4211 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4212 }
4213
4214 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4215 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4216 specifies whether the function should care about type incompatibility between
4217 the current and new expressions. If it is false, the function will leave
4218 incompatibility issues to the caller. Return true iff the expression
4219 was modified. */
4220
4221 bool
4222 ipa_modify_expr (tree *expr, bool convert,
4223 ipa_parm_adjustment_vec adjustments)
4224 {
4225 struct ipa_parm_adjustment *cand
4226 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4227 if (!cand)
4228 return false;
4229
4230 tree src;
4231 if (cand->by_ref)
4232 src = build_simple_mem_ref (cand->new_decl);
4233 else
4234 src = cand->new_decl;
4235
4236 if (dump_file && (dump_flags & TDF_DETAILS))
4237 {
4238 fprintf (dump_file, "About to replace expr ");
4239 print_generic_expr (dump_file, *expr, 0);
4240 fprintf (dump_file, " with ");
4241 print_generic_expr (dump_file, src, 0);
4242 fprintf (dump_file, "\n");
4243 }
4244
4245 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4246 {
4247 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4248 *expr = vce;
4249 }
4250 else
4251 *expr = src;
4252 return true;
4253 }
4254
4255 /* If T is an SSA_NAME, return NULL if it is not a default def or
4256 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4257 the base variable is always returned, regardless if it is a default
4258 def. Return T if it is not an SSA_NAME. */
4259
4260 static tree
4261 get_ssa_base_param (tree t, bool ignore_default_def)
4262 {
4263 if (TREE_CODE (t) == SSA_NAME)
4264 {
4265 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4266 return SSA_NAME_VAR (t);
4267 else
4268 return NULL_TREE;
4269 }
4270 return t;
4271 }
4272
4273 /* Given an expression, return an adjustment entry specifying the
4274 transformation to be done on EXPR. If no suitable adjustment entry
4275 was found, returns NULL.
4276
4277 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4278 default def, otherwise bail on them.
4279
4280 If CONVERT is non-NULL, this function will set *CONVERT if the
4281 expression provided is a component reference. ADJUSTMENTS is the
4282 adjustments vector. */
4283
4284 ipa_parm_adjustment *
4285 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4286 ipa_parm_adjustment_vec adjustments,
4287 bool ignore_default_def)
4288 {
4289 if (TREE_CODE (**expr) == BIT_FIELD_REF
4290 || TREE_CODE (**expr) == IMAGPART_EXPR
4291 || TREE_CODE (**expr) == REALPART_EXPR)
4292 {
4293 *expr = &TREE_OPERAND (**expr, 0);
4294 if (convert)
4295 *convert = true;
4296 }
4297
4298 HOST_WIDE_INT offset, size, max_size;
4299 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4300 if (!base || size == -1 || max_size == -1)
4301 return NULL;
4302
4303 if (TREE_CODE (base) == MEM_REF)
4304 {
4305 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4306 base = TREE_OPERAND (base, 0);
4307 }
4308
4309 base = get_ssa_base_param (base, ignore_default_def);
4310 if (!base || TREE_CODE (base) != PARM_DECL)
4311 return NULL;
4312
4313 struct ipa_parm_adjustment *cand = NULL;
4314 unsigned int len = adjustments.length ();
4315 for (unsigned i = 0; i < len; i++)
4316 {
4317 struct ipa_parm_adjustment *adj = &adjustments[i];
4318
4319 if (adj->base == base
4320 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4321 {
4322 cand = adj;
4323 break;
4324 }
4325 }
4326
4327 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4328 return NULL;
4329 return cand;
4330 }
4331
4332 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4333
4334 static bool
4335 index_in_adjustments_multiple_times_p (int base_index,
4336 ipa_parm_adjustment_vec adjustments)
4337 {
4338 int i, len = adjustments.length ();
4339 bool one = false;
4340
4341 for (i = 0; i < len; i++)
4342 {
4343 struct ipa_parm_adjustment *adj;
4344 adj = &adjustments[i];
4345
4346 if (adj->base_index == base_index)
4347 {
4348 if (one)
4349 return true;
4350 else
4351 one = true;
4352 }
4353 }
4354 return false;
4355 }
4356
4357
4358 /* Return adjustments that should have the same effect on function parameters
4359 and call arguments as if they were first changed according to adjustments in
4360 INNER and then by adjustments in OUTER. */
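
/* For example, if INNER removes the second of three original parameters
and OUTER then removes the first of the two that remain, the combined
vector removes original parameters 0 and 1 and copies parameter 2. */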
4361
4362 ipa_parm_adjustment_vec
4363 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4364 ipa_parm_adjustment_vec outer)
4365 {
4366 int i, outlen = outer.length ();
4367 int inlen = inner.length ();
4368 int removals = 0;
4369 ipa_parm_adjustment_vec adjustments, tmp;
4370
4371 tmp.create (inlen);
4372 for (i = 0; i < inlen; i++)
4373 {
4374 struct ipa_parm_adjustment *n;
4375 n = &inner[i];
4376
4377 if (n->op == IPA_PARM_OP_REMOVE)
4378 removals++;
4379 else
4380 {
4381 /* FIXME: Handling of new arguments is not implemented yet. */
4382 gcc_assert (n->op != IPA_PARM_OP_NEW);
4383 tmp.quick_push (*n);
4384 }
4385 }
4386
4387 adjustments.create (outlen + removals);
4388 for (i = 0; i < outlen; i++)
4389 {
4390 struct ipa_parm_adjustment r;
4391 struct ipa_parm_adjustment *out = &outer[i];
4392 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4393
4394 memset (&r, 0, sizeof (r));
4395 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4396 if (out->op == IPA_PARM_OP_REMOVE)
4397 {
4398 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4399 {
4400 r.op = IPA_PARM_OP_REMOVE;
4401 adjustments.quick_push (r);
4402 }
4403 continue;
4404 }
4405 else
4406 {
4407 /* FIXME: Handling of new arguments is not implemented yet. */
4408 gcc_assert (out->op != IPA_PARM_OP_NEW);
4409 }
4410
4411 r.base_index = in->base_index;
4412 r.type = out->type;
4413
4414 /* FIXME: Create nonlocal value too. */
4415
4416 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4417 r.op = IPA_PARM_OP_COPY;
4418 else if (in->op == IPA_PARM_OP_COPY)
4419 r.offset = out->offset;
4420 else if (out->op == IPA_PARM_OP_COPY)
4421 r.offset = in->offset;
4422 else
4423 r.offset = in->offset + out->offset;
4424 adjustments.quick_push (r);
4425 }
4426
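/* Finally, re-add the removals recorded in INNER; they were filtered
out of TMP above and are therefore not visible to OUTER's indices. */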
4427 for (i = 0; i < inlen; i++)
4428 {
4429 struct ipa_parm_adjustment *n = &inner[i];
4430
4431 if (n->op == IPA_PARM_OP_REMOVE)
4432 adjustments.quick_push (*n);
4433 }
4434
4435 tmp.release ();
4436 return adjustments;
4437 }
4438
4439 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4440 friendly way, assuming they are meant to be applied to FNDECL. */
4441
4442 void
4443 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4444 tree fndecl)
4445 {
4446 int i, len = adjustments.length ();
4447 bool first = true;
4448 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4449
4450 fprintf (file, "IPA param adjustments: ");
4451 for (i = 0; i < len; i++)
4452 {
4453 struct ipa_parm_adjustment *adj;
4454 adj = &adjustments[i];
4455
4456 if (!first)
4457 fprintf (file, " ");
4458 else
4459 first = false;
4460
4461 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4462 print_generic_expr (file, parms[adj->base_index], 0);
4463 if (adj->base)
4464 {
4465 fprintf (file, ", base: ");
4466 print_generic_expr (file, adj->base, 0);
4467 }
4468 if (adj->new_decl)
4469 {
4470 fprintf (file, ", new_decl: ");
4471 print_generic_expr (file, adj->new_decl, 0);
4472 }
4473 if (adj->new_ssa_base)
4474 {
4475 fprintf (file, ", new_ssa_base: ");
4476 print_generic_expr (file, adj->new_ssa_base, 0);
4477 }
4478
4479 if (adj->op == IPA_PARM_OP_COPY)
4480 fprintf (file, ", copy_param");
4481 else if (adj->op == IPA_PARM_OP_REMOVE)
4482 fprintf (file, ", remove_param");
4483 else
4484 fprintf (file, ", offset %li", (long) adj->offset);
4485 if (adj->by_ref)
4486 fprintf (file, ", by_ref");
4487 print_node_brief (file, ", type: ", adj->type, 0);
4488 fprintf (file, "\n");
4489 }
4490 parms.release ();
4491 }
4492
4493 /* Dump the linked list of aggregate replacement values AV to F. */
4494
4495 void
4496 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4497 {
4498 bool comma = false;
4499 fprintf (f, " Aggregate replacements:");
4500 for (; av; av = av->next)
4501 {
4502 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4503 av->index, av->offset);
4504 print_generic_expr (f, av->value, 0);
4505 comma = true;
4506 }
4507 fprintf (f, "\n");
4508 }
4509
4510 /* Stream out jump function JUMP_FUNC to OB. */
4511
4512 static void
4513 ipa_write_jump_function (struct output_block *ob,
4514 struct ipa_jump_func *jump_func)
4515 {
4516 struct ipa_agg_jf_item *item;
4517 struct bitpack_d bp;
4518 int i, count;
4519
4520 streamer_write_uhwi (ob, jump_func->type);
4521 switch (jump_func->type)
4522 {
4523 case IPA_JF_UNKNOWN:
4524 break;
4525 case IPA_JF_KNOWN_TYPE:
4526 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4527 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4528 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4529 break;
4530 case IPA_JF_CONST:
4531 gcc_assert (
4532 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4533 stream_write_tree (ob, jump_func->value.constant.value, true);
4534 break;
4535 case IPA_JF_PASS_THROUGH:
4536 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4537 if (jump_func->value.pass_through.operation == NOP_EXPR)
4538 {
4539 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4540 bp = bitpack_create (ob->main_stream);
4541 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4542 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4543 streamer_write_bitpack (&bp);
4544 }
4545 else
4546 {
4547 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4548 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4549 }
4550 break;
4551 case IPA_JF_ANCESTOR:
4552 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4553 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4554 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4555 bp = bitpack_create (ob->main_stream);
4556 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4557 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4558 streamer_write_bitpack (&bp);
4559 break;
4560 }
4561
4562 count = vec_safe_length (jump_func->agg.items);
4563 streamer_write_uhwi (ob, count);
4564 if (count)
4565 {
4566 bp = bitpack_create (ob->main_stream);
4567 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4568 streamer_write_bitpack (&bp);
4569 }
4570
4571 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4572 {
4573 streamer_write_uhwi (ob, item->offset);
4574 stream_write_tree (ob, item->value, true);
4575 }
4576 }
4577
4578 /* Read in jump function JUMP_FUNC from IB. */
4579
4580 static void
4581 ipa_read_jump_function (struct lto_input_block *ib,
4582 struct ipa_jump_func *jump_func,
4583 struct cgraph_edge *cs,
4584 struct data_in *data_in)
4585 {
4586 enum jump_func_type jftype;
4587 enum tree_code operation;
4588 int i, count;
4589
4590 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4591 switch (jftype)
4592 {
4593 case IPA_JF_UNKNOWN:
4594 jump_func->type = IPA_JF_UNKNOWN;
4595 break;
4596 case IPA_JF_KNOWN_TYPE:
4597 {
4598 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4599 tree base_type = stream_read_tree (ib, data_in);
4600 tree component_type = stream_read_tree (ib, data_in);
4601
4602 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4603 break;
4604 }
4605 case IPA_JF_CONST:
4606 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4607 break;
4608 case IPA_JF_PASS_THROUGH:
4609 operation = (enum tree_code) streamer_read_uhwi (ib);
4610 if (operation == NOP_EXPR)
4611 {
4612 int formal_id = streamer_read_uhwi (ib);
4613 struct bitpack_d bp = streamer_read_bitpack (ib);
4614 bool agg_preserved = bp_unpack_value (&bp, 1);
4615 bool type_preserved = bp_unpack_value (&bp, 1);
4616 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4617 type_preserved);
4618 }
4619 else
4620 {
4621 tree operand = stream_read_tree (ib, data_in);
4622 int formal_id = streamer_read_uhwi (ib);
4623 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4624 operation);
4625 }
4626 break;
4627 case IPA_JF_ANCESTOR:
4628 {
4629 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4630 tree type = stream_read_tree (ib, data_in);
4631 int formal_id = streamer_read_uhwi (ib);
4632 struct bitpack_d bp = streamer_read_bitpack (ib);
4633 bool agg_preserved = bp_unpack_value (&bp, 1);
4634 bool type_preserved = bp_unpack_value (&bp, 1);
4635
4636 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4637 type_preserved);
4638 break;
4639 }
4640 }
4641
4642 count = streamer_read_uhwi (ib);
4643 vec_alloc (jump_func->agg.items, count);
4644 if (count)
4645 {
4646 struct bitpack_d bp = streamer_read_bitpack (ib);
4647 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4648 }
4649 for (i = 0; i < count; i++)
4650 {
4651 struct ipa_agg_jf_item item;
4652 item.offset = streamer_read_uhwi (ib);
4653 item.value = stream_read_tree (ib, data_in);
4654 jump_func->agg.items->quick_push (item);
4655 }
4656 }
4657
4658 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4659 relevant to indirect inlining to OB. */
4660
4661 static void
4662 ipa_write_indirect_edge_info (struct output_block *ob,
4663 struct cgraph_edge *cs)
4664 {
4665 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4666 struct bitpack_d bp;
4667
4668 streamer_write_hwi (ob, ii->param_index);
4669 streamer_write_hwi (ob, ii->offset);
4670 bp = bitpack_create (ob->main_stream);
4671 bp_pack_value (&bp, ii->polymorphic, 1);
4672 bp_pack_value (&bp, ii->agg_contents, 1);
4673 bp_pack_value (&bp, ii->member_ptr, 1);
4674 bp_pack_value (&bp, ii->by_ref, 1);
4675 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4676 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4677 streamer_write_bitpack (&bp);
4678
4679 if (ii->polymorphic)
4680 {
4681 streamer_write_hwi (ob, ii->otr_token);
4682 stream_write_tree (ob, ii->otr_type, true);
4683 stream_write_tree (ob, ii->outer_type, true);
4684 }
4685 }
4686
4687 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4688 relevant to indirect inlining from IB. */
4689
4690 static void
4691 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4692 struct data_in *data_in ATTRIBUTE_UNUSED,
4693 struct cgraph_edge *cs)
4694 {
4695 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4696 struct bitpack_d bp;
4697
4698 ii->param_index = (int) streamer_read_hwi (ib);
4699 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4700 bp = streamer_read_bitpack (ib);
4701 ii->polymorphic = bp_unpack_value (&bp, 1);
4702 ii->agg_contents = bp_unpack_value (&bp, 1);
4703 ii->member_ptr = bp_unpack_value (&bp, 1);
4704 ii->by_ref = bp_unpack_value (&bp, 1);
4705 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4706 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4707 if (ii->polymorphic)
4708 {
4709 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4710 ii->otr_type = stream_read_tree (ib, data_in);
4711 ii->outer_type = stream_read_tree (ib, data_in);
4712 }
4713 }
4714
4715 /* Stream out NODE info to OB. */
4716
4717 static void
4718 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4719 {
4720 int node_ref;
4721 lto_symtab_encoder_t encoder;
4722 struct ipa_node_params *info = IPA_NODE_REF (node);
4723 int j;
4724 struct cgraph_edge *e;
4725 struct bitpack_d bp;
4726
4727 encoder = ob->decl_state->symtab_node_encoder;
4728 node_ref = lto_symtab_encoder_encode (encoder, node);
4729 streamer_write_uhwi (ob, node_ref);
4730
4731 streamer_write_uhwi (ob, ipa_get_param_count (info));
4732 for (j = 0; j < ipa_get_param_count (info); j++)
4733 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4734 bp = bitpack_create (ob->main_stream);
4735 gcc_assert (info->analysis_done
4736 || ipa_get_param_count (info) == 0);
4737 gcc_assert (!info->node_enqueued);
4738 gcc_assert (!info->ipcp_orig_node);
4739 for (j = 0; j < ipa_get_param_count (info); j++)
4740 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4741 streamer_write_bitpack (&bp);
4742 for (j = 0; j < ipa_get_param_count (info); j++)
4743 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4744 for (e = node->callees; e; e = e->next_callee)
4745 {
4746 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4747
4748 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4749 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4750 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4751 }
4752 for (e = node->indirect_calls; e; e = e->next_callee)
4753 {
4754 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4755
4756 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4757 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4758 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4759 ipa_write_indirect_edge_info (ob, e);
4760 }
4761 }
4762
4763 /* Stream in NODE info from IB. */
4764
4765 static void
4766 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4767 struct data_in *data_in)
4768 {
4769 struct ipa_node_params *info = IPA_NODE_REF (node);
4770 int k;
4771 struct cgraph_edge *e;
4772 struct bitpack_d bp;
4773
4774 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4775
4776 for (k = 0; k < ipa_get_param_count (info); k++)
4777 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4778
4779 bp = streamer_read_bitpack (ib);
4780 if (ipa_get_param_count (info) != 0)
4781 info->analysis_done = true;
4782 info->node_enqueued = false;
4783 for (k = 0; k < ipa_get_param_count (info); k++)
4784 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4785 for (k = 0; k < ipa_get_param_count (info); k++)
4786 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4787 for (e = node->callees; e; e = e->next_callee)
4788 {
4789 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4790 int count = streamer_read_uhwi (ib);
4791
4792 if (!count)
4793 continue;
4794 vec_safe_grow_cleared (args->jump_functions, count);
4795
4796 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4797 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4798 data_in);
4799 }
4800 for (e = node->indirect_calls; e; e = e->next_callee)
4801 {
4802 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4803 int count = streamer_read_uhwi (ib);
4804
4805 if (count)
4806 {
4807 vec_safe_grow_cleared (args->jump_functions, count);
4808 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4809 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4810 data_in);
4811 }
4812 ipa_read_indirect_edge_info (ib, data_in, e);
4813 }
4814 }
4815
4816 /* Write jump functions for all analyzed nodes in the current LTO partition. */
4817
4818 void
4819 ipa_prop_write_jump_functions (void)
4820 {
4821 struct cgraph_node *node;
4822 struct output_block *ob;
4823 unsigned int count = 0;
4824 lto_symtab_encoder_iterator lsei;
4825 lto_symtab_encoder_t encoder;
4826
4827
4828 if (!ipa_node_params_vector.exists ())
4829 return;
4830
4831 ob = create_output_block (LTO_section_jump_functions);
4832 encoder = ob->decl_state->symtab_node_encoder;
4833 ob->cgraph_node = NULL;
4834 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4835 lsei_next_function_in_partition (&lsei))
4836 {
4837 node = lsei_cgraph_node (lsei);
4838 if (cgraph_function_with_gimple_body_p (node)
4839 && IPA_NODE_REF (node) != NULL)
4840 count++;
4841 }
4842
4843 streamer_write_uhwi (ob, count);
4844
4845 /* Process all of the functions. */
4846 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4847 lsei_next_function_in_partition (&lsei))
4848 {
4849 node = lsei_cgraph_node (lsei);
4850 if (cgraph_function_with_gimple_body_p (node)
4851 && IPA_NODE_REF (node) != NULL)
4852 ipa_write_node_info (ob, node);
4853 }
4854 streamer_write_char_stream (ob->main_stream, 0);
4855 produce_asm (ob, NULL);
4856 destroy_output_block (ob);
4857 }
4858
4859 /* Read section in file FILE_DATA of length LEN with data DATA. */
4860
4861 static void
4862 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4863 size_t len)
4864 {
4865 const struct lto_function_header *header =
4866 (const struct lto_function_header *) data;
4867 const int cfg_offset = sizeof (struct lto_function_header);
4868 const int main_offset = cfg_offset + header->cfg_size;
4869 const int string_offset = main_offset + header->main_size;
4870 struct data_in *data_in;
4871 struct lto_input_block ib_main;
4872 unsigned int i;
4873 unsigned int count;
4874
4875 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4876 header->main_size);
4877
4878 data_in =
4879 lto_data_in_create (file_data, (const char *) data + string_offset,
4880 header->string_size, vNULL);
4881 count = streamer_read_uhwi (&ib_main);
4882
4883 for (i = 0; i < count; i++)
4884 {
4885 unsigned int index;
4886 struct cgraph_node *node;
4887 lto_symtab_encoder_t encoder;
4888
4889 index = streamer_read_uhwi (&ib_main);
4890 encoder = file_data->symtab_node_encoder;
4891 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4892 gcc_assert (node->definition);
4893 ipa_read_node_info (&ib_main, node, data_in);
4894 }
4895 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4896 len);
4897 lto_data_in_delete (data_in);
4898 }
4899
4900 /* Read ipcp jump functions. */
4901
4902 void
4903 ipa_prop_read_jump_functions (void)
4904 {
4905 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4906 struct lto_file_decl_data *file_data;
4907 unsigned int j = 0;
4908
4909 ipa_check_create_node_params ();
4910 ipa_check_create_edge_args ();
4911 ipa_register_cgraph_hooks ();
4912
4913 while ((file_data = file_data_vec[j++]))
4914 {
4915 size_t len;
4916 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4917
4918 if (data)
4919 ipa_prop_read_section (file_data, data, len);
4920 }
4921 }
4922
4923 /* After merging units, we can get mismatches in argument counts.
4924 Decl merging might also have rendered parameter lists obsolete.
4925 Also compute called_with_variable_arg info. */
4926
4927 void
4928 ipa_update_after_lto_read (void)
4929 {
4930 ipa_check_create_node_params ();
4931 ipa_check_create_edge_args ();
4932 }
4933
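/* Stream out the aggregate value replacement chain for NODE to OB. */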
4934 void
4935 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4936 {
4937 int node_ref;
4938 unsigned int count = 0;
4939 lto_symtab_encoder_t encoder;
4940 struct ipa_agg_replacement_value *aggvals, *av;
4941
4942 aggvals = ipa_get_agg_replacements_for_node (node);
4943 encoder = ob->decl_state->symtab_node_encoder;
4944 node_ref = lto_symtab_encoder_encode (encoder, node);
4945 streamer_write_uhwi (ob, node_ref);
4946
4947 for (av = aggvals; av; av = av->next)
4948 count++;
4949 streamer_write_uhwi (ob, count);
4950
4951 for (av = aggvals; av; av = av->next)
4952 {
4953 struct bitpack_d bp;
4954
4955 streamer_write_uhwi (ob, av->offset);
4956 streamer_write_uhwi (ob, av->index);
4957 stream_write_tree (ob, av->value, true);
4958
4959 bp = bitpack_create (ob->main_stream);
4960 bp_pack_value (&bp, av->by_ref, 1);
4961 streamer_write_bitpack (&bp);
4962 }
4963 }
4964
4965 /* Stream in the aggregate value replacement chain for NODE from IB. */
4966
4967 static void
4968 read_agg_replacement_chain (struct lto_input_block *ib,
4969 struct cgraph_node *node,
4970 struct data_in *data_in)
4971 {
4972 struct ipa_agg_replacement_value *aggvals = NULL;
4973 unsigned int count, i;
4974
4975 count = streamer_read_uhwi (ib);
4976 for (i = 0; i < count; i++)
4977 {
4978 struct ipa_agg_replacement_value *av;
4979 struct bitpack_d bp;
4980
4981 av = ggc_alloc<ipa_agg_replacement_value> ();
4982 av->offset = streamer_read_uhwi (ib);
4983 av->index = streamer_read_uhwi (ib);
4984 av->value = stream_read_tree (ib, data_in);
4985 bp = streamer_read_bitpack (ib);
4986 av->by_ref = bp_unpack_value (&bp, 1);
4987 av->next = aggvals;
4988 aggvals = av;
4989 }
4990 ipa_set_node_agg_value_chain (node, aggvals);
4991 }
4992
4993 /* Write all aggregate replacements for nodes in the current LTO partition. */
4994
4995 void
4996 ipa_prop_write_all_agg_replacement (void)
4997 {
4998 struct cgraph_node *node;
4999 struct output_block *ob;
5000 unsigned int count = 0;
5001 lto_symtab_encoder_iterator lsei;
5002 lto_symtab_encoder_t encoder;
5003
5004 if (!ipa_node_agg_replacements)
5005 return;
5006
5007 ob = create_output_block (LTO_section_ipcp_transform);
5008 encoder = ob->decl_state->symtab_node_encoder;
5009 ob->cgraph_node = NULL;
5010 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5011 lsei_next_function_in_partition (&lsei))
5012 {
5013 node = lsei_cgraph_node (lsei);
5014 if (cgraph_function_with_gimple_body_p (node)
5015 && ipa_get_agg_replacements_for_node (node) != NULL)
5016 count++;
5017 }
5018
5019 streamer_write_uhwi (ob, count);
5020
5021 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5022 lsei_next_function_in_partition (&lsei))
5023 {
5024 node = lsei_cgraph_node (lsei);
5025 if (cgraph_function_with_gimple_body_p (node)
5026 && ipa_get_agg_replacements_for_node (node) != NULL)
5027 write_agg_replacement_chain (ob, node);
5028 }
5029 streamer_write_char_stream (ob->main_stream, 0);
5030 produce_asm (ob, NULL);
5031 destroy_output_block (ob);
5032 }
5033
5034 /* Read replacements section in file FILE_DATA of length LEN with data
5035 DATA. */
5036
5037 static void
5038 read_replacements_section (struct lto_file_decl_data *file_data,
5039 const char *data,
5040 size_t len)
5041 {
5042 const struct lto_function_header *header =
5043 (const struct lto_function_header *) data;
5044 const int cfg_offset = sizeof (struct lto_function_header);
5045 const int main_offset = cfg_offset + header->cfg_size;
5046 const int string_offset = main_offset + header->main_size;
5047 struct data_in *data_in;
5048 struct lto_input_block ib_main;
5049 unsigned int i;
5050 unsigned int count;
5051
5052 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
5053 header->main_size);
5054
5055 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5056 header->string_size, vNULL);
5057 count = streamer_read_uhwi (&ib_main);
5058
5059 for (i = 0; i < count; i++)
5060 {
5061 unsigned int index;
5062 struct cgraph_node *node;
5063 lto_symtab_encoder_t encoder;
5064
5065 index = streamer_read_uhwi (&ib_main);
5066 encoder = file_data->symtab_node_encoder;
5067 node = cgraph (lto_symtab_encoder_deref (encoder, index));
5068 gcc_assert (node->definition);
5069 read_agg_replacement_chain (&ib_main, node, data_in);
5070 }
5071 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5072 len);
5073 lto_data_in_delete (data_in);
5074 }
5075
5076 /* Read IPA-CP aggregate replacements. */
5077
5078 void
5079 ipa_prop_read_all_agg_replacement (void)
5080 {
5081 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5082 struct lto_file_decl_data *file_data;
5083 unsigned int j = 0;
5084
5085 while ((file_data = file_data_vec[j++]))
5086 {
5087 size_t len;
5088 const char *data = lto_get_section_data (file_data,
5089 LTO_section_ipcp_transform,
5090 NULL, &len);
5091 if (data)
5092 read_replacements_section (file_data, data, len);
5093 }
5094 }
5095
5096 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5097 NODE. */
5098
5099 static void
5100 adjust_agg_replacement_values (struct cgraph_node *node,
5101 struct ipa_agg_replacement_value *aggval)
5102 {
5103 struct ipa_agg_replacement_value *v;
5104 int i, c = 0, d = 0, *adj;
5105
5106 if (!node->clone.combined_args_to_skip)
5107 return;
5108
5109 for (v = aggval; v; v = v->next)
5110 {
5111 gcc_assert (v->index >= 0);
5112 if (c < v->index)
5113 c = v->index;
5114 }
5115 c++;
5116
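/* Build a map from pre-clone parameter indices to post-clone ones;
a skipped parameter maps to -1 (presumably no remaining replacement
refers to a skipped parameter). */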
5117 adj = XALLOCAVEC (int, c);
5118 for (i = 0; i < c; i++)
5119 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5120 {
5121 adj[i] = -1;
5122 d++;
5123 }
5124 else
5125 adj[i] = i - d;
5126
5127 for (v = aggval; v; v = v->next)
5128 v->index = adj[v->index];
5129 }
5130
5131 /* Dominator walker driving the ipcp modification phase. */
5132
5133 class ipcp_modif_dom_walker : public dom_walker
5134 {
5135 public:
5136 ipcp_modif_dom_walker (struct func_body_info *fbi,
5137 vec<ipa_param_descriptor> descs,
5138 struct ipa_agg_replacement_value *av,
5139 bool *sc, bool *cc)
5140 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5141 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5142
5143 virtual void before_dom_children (basic_block);
5144
5145 private:
5146 struct func_body_info *m_fbi;
5147 vec<ipa_param_descriptor> m_descriptors;
5148 struct ipa_agg_replacement_value *m_aggval;
5149 bool *m_something_changed, *m_cfg_changed;
5150 };
5151
5152 void
5153 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5154 {
5155 gimple_stmt_iterator gsi;
5156 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5157 {
5158 struct ipa_agg_replacement_value *v;
5159 gimple stmt = gsi_stmt (gsi);
5160 tree rhs, val, t;
5161 HOST_WIDE_INT offset, size;
5162 int index;
5163 bool by_ref, vce;
5164
5165 if (!gimple_assign_load_p (stmt))
5166 continue;
5167 rhs = gimple_assign_rhs1 (stmt);
5168 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5169 continue;
5170
5171 vce = false;
5172 t = rhs;
5173 while (handled_component_p (t))
5174 {
5175 /* V_C_E can do things like convert an array of integers to one
5176 bigger integer and similar things we do not handle below. */
5177 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5178 {
5179 vce = true;
5180 break;
5181 }
5182 t = TREE_OPERAND (t, 0);
5183 }
5184 if (vce)
5185 continue;
5186
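/* See whether the load reads from an aggregate passed in a parameter
and, if so, look for a known replacement value at that offset. */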
5187 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5188 &offset, &size, &by_ref))
5189 continue;
5190 for (v = m_aggval; v; v = v->next)
5191 if (v->index == index
5192 && v->offset == offset)
5193 break;
5194 if (!v
5195 || v->by_ref != by_ref
5196 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5197 continue;
5198
5199 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5200 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5201 {
5202 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5203 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5204 else if (TYPE_SIZE (TREE_TYPE (rhs))
5205 == TYPE_SIZE (TREE_TYPE (v->value)))
5206 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5207 else
5208 {
5209 if (dump_file)
5210 {
5211 fprintf (dump_file, " const ");
5212 print_generic_expr (dump_file, v->value, 0);
5213 fprintf (dump_file, " can't be converted to type of ");
5214 print_generic_expr (dump_file, rhs, 0);
5215 fprintf (dump_file, "\n");
5216 }
5217 continue;
5218 }
5219 }
5220 else
5221 val = v->value;
5222
5223 if (dump_file && (dump_flags & TDF_DETAILS))
5224 {
5225 fprintf (dump_file, "Modifying stmt:\n ");
5226 print_gimple_stmt (dump_file, stmt, 0, 0);
5227 }
5228 gimple_assign_set_rhs_from_tree (&gsi, val);
5229 update_stmt (stmt);
5230
5231 if (dump_file && (dump_flags & TDF_DETAILS))
5232 {
5233 fprintf (dump_file, "into:\n ");
5234 print_gimple_stmt (dump_file, stmt, 0, 0);
5235 fprintf (dump_file, "\n");
5236 }
5237
5238 *m_something_changed = true;
5239 if (maybe_clean_eh_stmt (stmt)
5240 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5241 *m_cfg_changed = true;
5242 }
5243
5244 }
5245
5246 /* IPCP transformation phase doing propagation of aggregate values. */
5247
5248 unsigned int
5249 ipcp_transform_function (struct cgraph_node *node)
5250 {
5251 vec<ipa_param_descriptor> descriptors = vNULL;
5252 struct func_body_info fbi;
5253 struct ipa_agg_replacement_value *aggval;
5254 int param_count;
5255 bool cfg_changed = false, something_changed = false;
5256
5257 gcc_checking_assert (cfun);
5258 gcc_checking_assert (current_function_decl);
5259
5260 if (dump_file)
5261 fprintf (dump_file, "Modification phase of node %s/%i\n",
5262 node->name (), node->order);
5263
5264 aggval = ipa_get_agg_replacements_for_node (node);
5265 if (!aggval)
5266 return 0;
5267 param_count = count_formal_params (node->decl);
5268 if (param_count == 0)
5269 return 0;
5270 adjust_agg_replacement_values (node, aggval);
5271 if (dump_file)
5272 ipa_dump_agg_replacement_values (dump_file, aggval);
5273
5274 fbi.node = node;
5275 fbi.info = NULL;
5276 fbi.bb_infos = vNULL;
5277 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5278 fbi.param_count = param_count;
5279 fbi.aa_walked = 0;
5280
5281 descriptors.safe_grow_cleared (param_count);
5282 ipa_populate_param_decls (node, descriptors);
5283 calculate_dominance_info (CDI_DOMINATORS);
5284 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5285 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5286
5287 int i;
5288 struct ipa_bb_info *bi;
5289 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5290 free_ipa_bb_info (bi);
5291 fbi.bb_infos.release ();
5292 free_dominance_info (CDI_DOMINATORS);
5293 (*ipa_node_agg_replacements)[node->uid] = NULL;
5294 descriptors.release ();
5295
5296 if (!something_changed)
5297 return 0;
5298 else if (cfg_changed)
5299 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5300 else
5301 return TODO_update_ssa_only_virtuals;
5302 }