[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60 #include "stringpool.h"
61 #include "tree-ssanames.h"
62 #include "dbgcnt.h"
63 #include "domwalk.h"
64 #include "builtins.h"
65 #include "calls.h"
66
67 /* Intermediate information that we get from alias analysis about a particular
68 parameter in a particular basic_block. When a parameter or the memory it
69 references is marked modified, we use that information in all dominated
70 blocks without consulting the alias analysis oracle. */
71
72 struct param_aa_status
73 {
74 /* Set when this structure contains meaningful information. If not, the
75 structure describing a dominating BB should be used instead. */
76 bool valid;
77
78 /* Whether we have seen something which might have modified the data in
79 question. PARM is for the parameter itself, REF is for data it points to
80 but using the alias type of individual accesses and PT is the same thing
81 but for computing aggregate pass-through functions using a very inclusive
82 ao_ref. */
83 bool parm_modified, ref_modified, pt_modified;
84 };
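
/* An illustrative sketch (hypothetical, not from the original sources) of why
   caching these flags per BB is sound: the flags only ever go from
   "unmodified" to "modified", and whatever the alias walk saw in a dominator
   lies on every path to the dominated blocks.  For input such as

      extern void use (int);

      void
      example (int c, int *p)
      {
	*p = 0;	    // may modify the data P points to
	use (*p);   // a query here walks the vdefs and caches
		    // ref_modified in this block
	if (c)
	  use (*p); // this block is dominated by the one above and
		    // inherits the cached answer without a new walk
      }

   this is the reuse that find_dominating_aa_status implements later in this
   file.  */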
85
86 /* Information related to a given BB that is used only when looking at function
87 body. */
88
89 struct ipa_bb_info
90 {
91 /* Call graph edges going out of this BB. */
92 vec<cgraph_edge_p> cg_edges;
93 /* Alias analysis statuses of each formal parameter at this bb. */
94 vec<param_aa_status> param_aa_statuses;
95 };
96
97 /* Structure with global information that is only used when looking at function
98 body. */
99
100 struct func_body_info
101 {
102 /* The node that is being analyzed. */
103 cgraph_node *node;
104
105 /* Its info. */
106 struct ipa_node_params *info;
107
108 /* Information about individual BBs. */
109 vec<ipa_bb_info> bb_infos;
110
111 /* Number of parameters. */
112 int param_count;
113
114 /* Number of statements already walked when analyzing this function. */
115 unsigned int aa_walked;
116 };
117
118 /* Vector where the parameter infos are actually stored. */
119 vec<ipa_node_params> ipa_node_params_vector;
120 /* Vector of known aggregate values in cloned nodes. */
121 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
122 /* Vector where the edge argument infos are actually stored. */
123 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
124
125 /* Holders of ipa cgraph hooks: */
126 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
127 static struct cgraph_node_hook_list *node_removal_hook_holder;
128 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
129 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
130 static struct cgraph_node_hook_list *function_insertion_hook_holder;
131
132 /* Description of a reference to an IPA constant. */
133 struct ipa_cst_ref_desc
134 {
135 /* Edge that corresponds to the statement which took the reference. */
136 struct cgraph_edge *cs;
137 /* Linked list of duplicates created when call graph edges are cloned. */
138 struct ipa_cst_ref_desc *next_duplicate;
139 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
140 is out of control. */
141 int refcount;
142 };
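
/* A hedged illustration (hypothetical, not from the original sources): for

      extern void target (void);
      extern void consume (void (*) (void));

      void
      pass (void)
      {
	consume (target);
      }

   the argument gives rise to an IPA_JF_CONST jump function holding &target,
   and a reference description records the call graph edge that took the
   reference, so that edge cloning and removal can keep the reference counts
   on TARGET consistent.  */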
143
144 /* Allocation pool for reference descriptions. */
145
146 static alloc_pool ipa_refdesc_pool;
147
148 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
149 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
150
151 static bool
152 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
153 {
154 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
155 struct cl_optimization *os;
156
157 if (!fs_opts)
158 return false;
159 os = TREE_OPTIMIZATION (fs_opts);
160 return !os->x_optimize || !os->x_flag_ipa_cp;
161 }
162
163 /* Return index of the formal whose tree is PTREE among the parameter
164 DESCRIPTORS, or -1 if it is not found. */
165
166 static int
167 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
168 {
169 int i, count;
170
171 count = descriptors.length ();
172 for (i = 0; i < count; i++)
173 if (descriptors[i].decl == ptree)
174 return i;
175
176 return -1;
177 }
178
179 /* Return index of the formal whose tree is PTREE in the function which corresponds
180 to INFO. */
181
182 int
183 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
184 {
185 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
186 }
187
188 /* Populate the decl fields in parameter DESCRIPTORS that correspond to
189 NODE. */
190
191 static void
192 ipa_populate_param_decls (struct cgraph_node *node,
193 vec<ipa_param_descriptor> &descriptors)
194 {
195 tree fndecl;
196 tree fnargs;
197 tree parm;
198 int param_num;
199
200 fndecl = node->decl;
201 gcc_assert (gimple_has_body_p (fndecl));
202 fnargs = DECL_ARGUMENTS (fndecl);
203 param_num = 0;
204 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
205 {
206 descriptors[param_num].decl = parm;
207 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
208 param_num++;
209 }
210 }
211
212 /* Return how many formal parameters FNDECL has. */
213
214 static inline int
215 count_formal_params (tree fndecl)
216 {
217 tree parm;
218 int count = 0;
219 gcc_assert (gimple_has_body_p (fndecl));
220
221 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
222 count++;
223
224 return count;
225 }
226
227 /* Dump the Ith formal parameter of the function corresponding to INFO to
228 FILE, together with its declaration when it is known. */
230
231 void
232 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
233 {
234 fprintf (file, "param #%i", i);
235 if (info->descriptors[i].decl)
236 {
237 fprintf (file, " ");
238 print_generic_expr (file, info->descriptors[i].decl, 0);
239 }
240 }
241
242 /* Initialize the ipa_node_params structure associated with NODE
243 to hold PARAM_COUNT parameters. */
244
245 void
246 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
247 {
248 struct ipa_node_params *info = IPA_NODE_REF (node);
249
250 if (!info->descriptors.exists () && param_count)
251 info->descriptors.safe_grow_cleared (param_count);
252 }
253
254 /* Initialize the ipa_node_params structure associated with NODE by counting
255 the function parameters, creating the descriptors and populating their
256 param_decls. */
257
258 void
259 ipa_initialize_node_params (struct cgraph_node *node)
260 {
261 struct ipa_node_params *info = IPA_NODE_REF (node);
262
263 if (!info->descriptors.exists ())
264 {
265 ipa_alloc_node_params (node, count_formal_params (node->decl));
266 ipa_populate_param_decls (node, info->descriptors);
267 }
268 }
269
270 /* Print the jump functions associated with call graph edge CS to file F. */
271
272 static void
273 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
274 {
275 int i, count;
276
277 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
278 for (i = 0; i < count; i++)
279 {
280 struct ipa_jump_func *jump_func;
281 enum jump_func_type type;
282
283 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
284 type = jump_func->type;
285
286 fprintf (f, " param %d: ", i);
287 if (type == IPA_JF_UNKNOWN)
288 fprintf (f, "UNKNOWN\n");
289 else if (type == IPA_JF_KNOWN_TYPE)
290 {
291 fprintf (f, "KNOWN TYPE: base ");
292 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
293 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
294 jump_func->value.known_type.offset);
295 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
296 fprintf (f, "\n");
297 }
298 else if (type == IPA_JF_CONST)
299 {
300 tree val = jump_func->value.constant.value;
301 fprintf (f, "CONST: ");
302 print_generic_expr (f, val, 0);
303 if (TREE_CODE (val) == ADDR_EXPR
304 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
305 {
306 fprintf (f, " -> ");
307 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
308 0);
309 }
310 fprintf (f, "\n");
311 }
312 else if (type == IPA_JF_PASS_THROUGH)
313 {
314 fprintf (f, "PASS THROUGH: ");
315 fprintf (f, "%d, op %s",
316 jump_func->value.pass_through.formal_id,
317 get_tree_code_name (jump_func->value.pass_through.operation));
318 if (jump_func->value.pass_through.operation != NOP_EXPR)
319 {
320 fprintf (f, " ");
321 print_generic_expr (f,
322 jump_func->value.pass_through.operand, 0);
323 }
324 if (jump_func->value.pass_through.agg_preserved)
325 fprintf (f, ", agg_preserved");
326 if (jump_func->value.pass_through.type_preserved)
327 fprintf (f, ", type_preserved");
328 fprintf (f, "\n");
329 }
330 else if (type == IPA_JF_ANCESTOR)
331 {
332 fprintf (f, "ANCESTOR: ");
333 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
334 jump_func->value.ancestor.formal_id,
335 jump_func->value.ancestor.offset);
336 print_generic_expr (f, jump_func->value.ancestor.type, 0);
337 if (jump_func->value.ancestor.agg_preserved)
338 fprintf (f, ", agg_preserved");
339 if (jump_func->value.ancestor.type_preserved)
340 fprintf (f, ", type_preserved");
341 fprintf (f, "\n");
342 }
343
344 if (jump_func->agg.items)
345 {
346 struct ipa_agg_jf_item *item;
347 int j;
348
349 fprintf (f, " Aggregate passed by %s:\n",
350 jump_func->agg.by_ref ? "reference" : "value");
351 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
352 {
353 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
354 item->offset);
355 if (TYPE_P (item->value))
356 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
357 tree_to_uhwi (TYPE_SIZE (item->value)));
358 else
359 {
360 fprintf (f, "cst: ");
361 print_generic_expr (f, item->value, 0);
362 }
363 fprintf (f, "\n");
364 }
365 }
366 }
367 }
368
369
370 /* Print the jump functions of all arguments on all call graph edges going from
371 NODE to file F. */
372
373 void
374 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
375 {
376 struct cgraph_edge *cs;
377
378 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
379 node->order);
380 for (cs = node->callees; cs; cs = cs->next_callee)
381 {
382 if (!ipa_edge_args_info_available_for_edge_p (cs))
383 continue;
384
385 fprintf (f, " callsite %s/%i -> %s/%i : \n",
386 xstrdup (node->name ()), node->order,
387 xstrdup (cs->callee->name ()),
388 cs->callee->order);
389 ipa_print_node_jump_functions_for_edge (f, cs);
390 }
391
392 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
393 {
394 struct cgraph_indirect_call_info *ii;
395 if (!ipa_edge_args_info_available_for_edge_p (cs))
396 continue;
397
398 ii = cs->indirect_info;
399 if (ii->agg_contents)
400 fprintf (f, " indirect %s callsite, calling param %i, "
401 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
402 ii->member_ptr ? "member ptr" : "aggregate",
403 ii->param_index, ii->offset,
404 ii->by_ref ? "by reference" : "by value");
405 else
406 fprintf (f, " indirect %s callsite, calling param %i, "
407 "offset " HOST_WIDE_INT_PRINT_DEC,
408 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
409 ii->offset);
410
411 if (cs->call_stmt)
412 {
413 fprintf (f, ", for stmt ");
414 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
415 }
416 else
417 fprintf (f, "\n");
418 ipa_print_node_jump_functions_for_edge (f, cs);
419 }
420 }
421
422 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
423
424 void
425 ipa_print_all_jump_functions (FILE *f)
426 {
427 struct cgraph_node *node;
428
429 fprintf (f, "\nJump functions:\n");
430 FOR_EACH_FUNCTION (node)
431 {
432 ipa_print_node_jump_functions (f, node);
433 }
434 }
435
436 /* Set JFUNC to be a known type jump function. */
437
438 static void
439 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
440 tree base_type, tree component_type)
441 {
442 /* Recording and propagating main variants increases the chance that types
443 will match. */
444 base_type = TYPE_MAIN_VARIANT (base_type);
445 component_type = TYPE_MAIN_VARIANT (component_type);
446
447 gcc_assert (contains_polymorphic_type_p (base_type)
448 && contains_polymorphic_type_p (component_type));
449 if (!flag_devirtualize)
450 return;
451 jfunc->type = IPA_JF_KNOWN_TYPE;
452 jfunc->value.known_type.offset = offset;
453 jfunc->value.known_type.base_type = base_type;
454 jfunc->value.known_type.component_type = component_type;
455 gcc_assert (component_type);
456 }
457
458 /* Set DST to be a copy of the jump function SRC (to be used by jump function
459 combination code). The two functions will share their rdesc. */
460
461 static void
462 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
463 struct ipa_jump_func *src)
464
465 {
466 gcc_checking_assert (src->type == IPA_JF_CONST);
467 dst->type = IPA_JF_CONST;
468 dst->value.constant = src->value.constant;
469 }
470
471 /* Set JFUNC to be a constant jump function. */
472
473 static void
474 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
475 struct cgraph_edge *cs)
476 {
477 constant = unshare_expr (constant);
478 if (constant && EXPR_P (constant))
479 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
480 jfunc->type = IPA_JF_CONST;
481 jfunc->value.constant.value = unshare_expr_without_location (constant);
482
483 if (TREE_CODE (constant) == ADDR_EXPR
484 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
485 {
486 struct ipa_cst_ref_desc *rdesc;
487 if (!ipa_refdesc_pool)
488 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
489 sizeof (struct ipa_cst_ref_desc), 32);
490
491 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
492 rdesc->cs = cs;
493 rdesc->next_duplicate = NULL;
494 rdesc->refcount = 1;
495 jfunc->value.constant.rdesc = rdesc;
496 }
497 else
498 jfunc->value.constant.rdesc = NULL;
499 }
500
501 /* Set JFUNC to be a simple pass-through jump function. */
502 static void
503 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
504 bool agg_preserved, bool type_preserved)
505 {
506 jfunc->type = IPA_JF_PASS_THROUGH;
507 jfunc->value.pass_through.operand = NULL_TREE;
508 jfunc->value.pass_through.formal_id = formal_id;
509 jfunc->value.pass_through.operation = NOP_EXPR;
510 jfunc->value.pass_through.agg_preserved = agg_preserved;
511 jfunc->value.pass_through.type_preserved = type_preserved;
512 }
513
514 /* Set JFUNC to be an arithmetic pass through jump function. */
515
516 static void
517 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
518 tree operand, enum tree_code operation)
519 {
520 jfunc->type = IPA_JF_PASS_THROUGH;
521 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
522 jfunc->value.pass_through.formal_id = formal_id;
523 jfunc->value.pass_through.operation = operation;
524 jfunc->value.pass_through.agg_preserved = false;
525 jfunc->value.pass_through.type_preserved = false;
526 }
527
528 /* Set JFUNC to be an ancestor jump function. */
529
530 static void
531 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
532 tree type, int formal_id, bool agg_preserved,
533 bool type_preserved)
534 {
535 if (!flag_devirtualize)
536 type_preserved = false;
537 if (!type_preserved)
538 type = NULL_TREE;
539 if (type)
540 type = TYPE_MAIN_VARIANT (type);
541 gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
542 jfunc->type = IPA_JF_ANCESTOR;
543 jfunc->value.ancestor.formal_id = formal_id;
544 jfunc->value.ancestor.offset = offset;
545 jfunc->value.ancestor.type = type_preserved ? type : NULL;
546 jfunc->value.ancestor.agg_preserved = agg_preserved;
547 jfunc->value.ancestor.type_preserved = type_preserved;
548 }
549
550 /* Extract the actual BINFO being described by JFUNC which must be a known type
551 jump function. */
552
553 tree
554 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
555 {
556 if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
557 return NULL_TREE;
558
559 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
560
561 if (!base_binfo)
562 return NULL_TREE;
563 return get_binfo_at_offset (base_binfo,
564 jfunc->value.known_type.offset,
565 jfunc->value.known_type.component_type);
566 }
567
568 /* Get IPA BB information about the given BB. FBI is the context of analysis
569 of this function body. */
570
571 static struct ipa_bb_info *
572 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
573 {
574 gcc_checking_assert (fbi);
575 return &fbi->bb_infos[bb->index];
576 }
577
578 /* Structure to be passed in between detect_type_change and
579 check_stmt_for_type_change. */
580
581 struct type_change_info
582 {
583 /* Offset into the object at which the virtual method table pointer we are
584 looking for resides. */
585 HOST_WIDE_INT offset;
586 /* The declaration or SSA_NAME pointer of the base that we are checking for
587 type change. */
588 tree object;
589 /* If we actually can tell the type that the object has changed to, it is
590 stored in this field. Otherwise it remains NULL_TREE. */
591 tree known_current_type;
592 /* Set to true if dynamic type change has been detected. */
593 bool type_maybe_changed;
594 /* Set to true if multiple types have been encountered. known_current_type
595 must be disregarded in that case. */
596 bool multiple_types_encountered;
597 };
598
599 /* Return true if STMT can modify a virtual method table pointer.
600
601 This function makes special assumptions about both constructors and
602 destructors which are all the functions that are allowed to alter the VMT
603 pointers. It assumes that destructors begin with assignment into all VMT
604 pointers and that constructors essentially behave in the following way:
605
606 1) The very first thing they do is to call constructors of ancestor
607 sub-objects that have them.
608
609 2) Then VMT pointers of this and all its ancestors are set to new values
610 corresponding to the type of the constructor.
611
612 3) Only afterwards, other stuff such as constructors of member sub-objects
613 and the code written by the user is run. Only this may include calling
614 virtual functions, directly or indirectly.
615
616 There is no other way to call a constructor of an ancestor
617 sub-object.
618
619 This means that we do not have to care whether constructors get the correct
620 type information because they will always change it (in fact, if we define
621 the type to be given by the VMT pointer, it is undefined).
622
623 The most important fact to derive from the above is that if, for some
624 statement in the section 3, we try to detect whether the dynamic type has
625 changed, we can safely ignore all calls as we examine the function body
626 backwards until we reach statements in section 2 because these calls cannot
627 be ancestor constructors or destructors (if the input is not bogus) and so
628 do not change the dynamic type (this holds true only for automatically
629 allocated objects but at the moment we devirtualize only these). We then
630 must detect that statements in section 2 change the dynamic type and can try
631 to derive the new type. That is enough and we can stop, we will never see
632 the calls into constructors of sub-objects in this code. Therefore we can
633 safely ignore all call statements that we traverse.
634 */
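
/* A hedged C++ sketch (hypothetical, not from the original sources) of the
   three phases described above:

      struct A { virtual void f (); };

      struct B : A
      {
	virtual void f ();
	B ();
      };

      B::B ()
      {
	// 1) A::A () has already run and stored A's VMT pointer.
	// 2) The compiled constructor now stores B's VMT pointer.
	// 3) Only then does user-written code run; a virtual call made
	//    from here on already sees the final VMT pointer.
	f ();
      }

   Walking backwards from a statement in phase 3, the stores from phase 2 are
   the ones that reveal the dynamic type, and intervening calls can be
   ignored as argued above.  */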
635
636 static bool
637 stmt_may_be_vtbl_ptr_store (gimple stmt)
638 {
639 if (is_gimple_call (stmt))
640 return false;
641 if (gimple_clobber_p (stmt))
642 return false;
643 else if (is_gimple_assign (stmt))
644 {
645 tree lhs = gimple_assign_lhs (stmt);
646
647 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
648 {
649 if (flag_strict_aliasing
650 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
651 return false;
652
653 if (TREE_CODE (lhs) == COMPONENT_REF
654 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
655 return false;
656 /* In the future we might want to use get_base_ref_and_offset to find
657 if there is a field corresponding to the offset and if so, proceed
658 almost like if it was a component ref. */
659 }
660 }
661 return true;
662 }
663
664 /* If STMT can be proved to be an assignment to the virtual method table
665 pointer of the object described by TCI and the type associated with the new
666 identified, return the type. Otherwise return NULL_TREE. */
667
668 static tree
669 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
670 {
671 HOST_WIDE_INT offset, size, max_size;
672 tree lhs, rhs, base, binfo;
673
674 if (!gimple_assign_single_p (stmt))
675 return NULL_TREE;
676
677 lhs = gimple_assign_lhs (stmt);
678 rhs = gimple_assign_rhs1 (stmt);
679 if (TREE_CODE (lhs) != COMPONENT_REF
680 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
681 return NULL_TREE;
682
683 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
684 if (offset != tci->offset
685 || size != POINTER_SIZE
686 || max_size != POINTER_SIZE)
687 return NULL_TREE;
688 if (TREE_CODE (base) == MEM_REF)
689 {
690 if (TREE_CODE (tci->object) != MEM_REF
691 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
692 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
693 TREE_OPERAND (base, 1)))
694 return NULL_TREE;
695 }
696 else if (tci->object != base)
697 return NULL_TREE;
698
699 binfo = vtable_pointer_value_to_binfo (rhs);
700
701 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
702 base of outer type. In this case we would need to either
703 work on binfos or translate it back to outer type and offset.
704 KNOWN_TYPE jump functions are not ready for that, yet. */
705 if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
706 return NULL;
707
708 return BINFO_TYPE (binfo);
709 }
710
711 /* Callback of walk_aliased_vdefs and a helper function for
712 detect_type_change to check whether a particular statement may modify
713 the virtual table pointer, and if possible also determine the new type of
714 the (sub-)object. It stores its result into DATA, which points to a
715 type_change_info structure. */
716
717 static bool
718 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
719 {
720 gimple stmt = SSA_NAME_DEF_STMT (vdef);
721 struct type_change_info *tci = (struct type_change_info *) data;
722
723 if (stmt_may_be_vtbl_ptr_store (stmt))
724 {
725 tree type;
726
727 type = extr_type_from_vtbl_ptr_store (stmt, tci);
728 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
729 if (tci->type_maybe_changed
730 && type != tci->known_current_type)
731 tci->multiple_types_encountered = true;
732 tci->known_current_type = type;
733 tci->type_maybe_changed = true;
734 return true;
735 }
736 else
737 return false;
738 }
739
740 /* See if ARG is a PARAM_DECL describing an instance passed by pointer
741 or reference in FUNCTION. Return true if the dynamic type may change
742 between the beginning of the function and the point where CALL is invoked.
743
744 Generally functions are not allowed to change the type of such instances,
745 but they may call destructors. We assume that methods cannot destroy the
746 THIS pointer. Also, as a special case, constructors and destructors may
747 change the type of the THIS pointer. */
748
749 static bool
750 param_type_may_change_p (tree function, tree arg, gimple call)
751 {
752 /* Pure functions cannot make any changes to the dynamic type;
753 that requires writing to memory. */
754 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
755 return false;
756 /* We need to check if we are within an inlined constructor
757 or destructor (ideally we would have a way to check that the
758 inlined cdtor is actually working on ARG, but we have no
759 easy tie on this, so punt on all non-pure cdtors).
760 We may also record the types of cdtors and, once we know the type
761 of the instance, match them.
762
763 Also, code unification optimizations may merge calls from
764 different blocks, making return values unreliable. So
765 do nothing during late optimization. */
766 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
767 return true;
768 if (TREE_CODE (arg) == SSA_NAME
769 && SSA_NAME_IS_DEFAULT_DEF (arg)
770 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
771 {
772 /* Normal (non-THIS) argument. */
773 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
774 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
775 /* THIS pointer of a method - here we want to watch constructors
776 and destructors as those definitely may change the dynamic
777 type. */
778 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
779 && !DECL_CXX_CONSTRUCTOR_P (function)
780 && !DECL_CXX_DESTRUCTOR_P (function)
781 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
782 {
783 /* Walk the inline stack and watch out for ctors/dtors. */
784 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
785 block = BLOCK_SUPERCONTEXT (block))
786 if (BLOCK_ABSTRACT_ORIGIN (block)
787 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
788 {
789 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
790
791 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
792 continue;
793 if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
794 && (DECL_CXX_CONSTRUCTOR_P (fn)
795 || DECL_CXX_DESTRUCTOR_P (fn)))
796 return true;
797 }
798 return false;
799 }
800 }
801 return true;
802 }
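
/* Hypothetical illustration (not from the original sources): in

      struct A { virtual void f (); };

      void
      wrap (A *a)
      {
	a->f ();
      }

   A is a default definition of a PARM_DECL of a function that is neither a
   constructor nor a destructor, so, provided no inlined cdtor shows up on the
   block stack of the call, the function above returns false and the dynamic
   type of *a may be assumed not to change before the call.  */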
803
804 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
805 callsite CALL) by looking for assignments to its virtual table pointer. If
806 it has, return true and fill in the jump function JFUNC with relevant type
807 information or set it to unknown. ARG is the object itself (not a pointer
808 to it, unless dereferenced). BASE is the base of the memory access as
809 returned by get_ref_base_and_extent, as is the offset.
810
811 This is a helper function for detect_type_change and detect_type_change_ssa
812 that does the heavy work, which is usually unnecessary. */
813
814 static bool
815 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
816 gimple call, struct ipa_jump_func *jfunc,
817 HOST_WIDE_INT offset)
818 {
819 struct type_change_info tci;
820 ao_ref ao;
821 bool entry_reached = false;
822
823 gcc_checking_assert (DECL_P (arg)
824 || TREE_CODE (arg) == MEM_REF
825 || handled_component_p (arg));
826
827 comp_type = TYPE_MAIN_VARIANT (comp_type);
828
829 /* Const calls cannot call virtual methods through VMT and so type changes do
830 not matter. */
831 if (!flag_devirtualize || !gimple_vuse (call)
832 /* Be sure expected_type is polymorphic. */
833 || !comp_type
834 || TREE_CODE (comp_type) != RECORD_TYPE
835 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
836 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
837 return true;
838
839 ao_ref_init (&ao, arg);
840 ao.base = base;
841 ao.offset = offset;
842 ao.size = POINTER_SIZE;
843 ao.max_size = ao.size;
844
845 tci.offset = offset;
846 tci.object = get_base_address (arg);
847 tci.known_current_type = NULL_TREE;
848 tci.type_maybe_changed = false;
849 tci.multiple_types_encountered = false;
850
851 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
852 &tci, NULL, &entry_reached);
853 if (!tci.type_maybe_changed)
854 return false;
855
856 if (!tci.known_current_type
857 || tci.multiple_types_encountered
858 || offset != 0
859 /* When the walk reached function entry, it means that type
860 is set along some paths but not along others. */
861 || entry_reached)
862 jfunc->type = IPA_JF_UNKNOWN;
863 else
864 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
865
866 return true;
867 }
868
869 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
870 If so, return true and fill in the jump function JFUNC with relevant type
871 information or set it to unknown. ARG is the object itself (not a pointer
872 to it, unless dereferenced). BASE is the base of the memory access as
873 returned by get_ref_base_and_extent, as is the offset. */
874
875 static bool
876 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
877 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
878 {
879 if (!flag_devirtualize)
880 return false;
881
882 if (TREE_CODE (base) == MEM_REF
883 && !param_type_may_change_p (current_function_decl,
884 TREE_OPERAND (base, 0),
885 call))
886 return false;
887 return detect_type_change_from_memory_writes (arg, base, comp_type,
888 call, jfunc, offset);
889 }
890
891 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
892 SSA name (its dereference will become the base and the offset is assumed to
893 be zero). */
894
895 static bool
896 detect_type_change_ssa (tree arg, tree comp_type,
897 gimple call, struct ipa_jump_func *jfunc)
898 {
899 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
900 if (!flag_devirtualize
901 || !POINTER_TYPE_P (TREE_TYPE (arg)))
902 return false;
903
904 if (!param_type_may_change_p (current_function_decl, arg, call))
905 return false;
906
907 arg = build2 (MEM_REF, ptr_type_node, arg,
908 build_int_cst (ptr_type_node, 0));
909
910 return detect_type_change_from_memory_writes (arg, arg, comp_type,
911 call, jfunc, 0);
912 }
913
914 /* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
915 the boolean variable pointed to by DATA to true. */
916
917 static bool
918 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
919 void *data)
920 {
921 bool *b = (bool *) data;
922 *b = true;
923 return true;
924 }
925
926 /* Return true if we have already walked so many statements in AA that we
927 should really just start giving up. */
928
929 static bool
930 aa_overwalked (struct func_body_info *fbi)
931 {
932 gcc_checking_assert (fbi);
933 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
934 }
935
936 /* Find the nearest valid aa status for parameter specified by INDEX that
937 dominates BB. */
938
939 static struct param_aa_status *
940 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
941 int index)
942 {
943 while (true)
944 {
945 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
946 if (!bb)
947 return NULL;
948 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
949 if (!bi->param_aa_statuses.is_empty ()
950 && bi->param_aa_statuses[index].valid)
951 return &bi->param_aa_statuses[index];
952 }
953 }
954
955 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
956 structures and/or initialize the result with a dominating description as
957 necessary. */
958
959 static struct param_aa_status *
960 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
961 int index)
962 {
963 gcc_checking_assert (fbi);
964 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
965 if (bi->param_aa_statuses.is_empty ())
966 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
967 struct param_aa_status *paa = &bi->param_aa_statuses[index];
968 if (!paa->valid)
969 {
970 gcc_checking_assert (!paa->parm_modified
971 && !paa->ref_modified
972 && !paa->pt_modified);
973 struct param_aa_status *dom_paa;
974 dom_paa = find_dominating_aa_status (fbi, bb, index);
975 if (dom_paa)
976 *paa = *dom_paa;
977 else
978 paa->valid = true;
979 }
980
981 return paa;
982 }
983
984 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
985 a value known not to be modified in this function before reaching the
986 statement STMT. FBI holds information about the function that we have
987 gathered so far but that does not survive the summary-building stage. */
988
989 static bool
990 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
991 gimple stmt, tree parm_load)
992 {
993 struct param_aa_status *paa;
994 bool modified = false;
995 ao_ref refd;
996
997 /* FIXME: FBI can be NULL if we are being called from outside
998 ipa_node_analysis or ipcp_transform_function, which currently happens
999 during inlining analysis. It would be great to extend fbi's lifetime and
1000 always have it. Currently, we are just not afraid of too much walking in
1001 that case. */
1002 if (fbi)
1003 {
1004 if (aa_overwalked (fbi))
1005 return false;
1006 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1007 if (paa->parm_modified)
1008 return false;
1009 }
1010 else
1011 paa = NULL;
1012
1013 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
1014 ao_ref_init (&refd, parm_load);
1015 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1016 &modified, NULL);
1017 if (fbi)
1018 fbi->aa_walked += walked;
1019 if (paa && modified)
1020 paa->parm_modified = true;
1021 return !modified;
1022 }
1023
1024 /* If STMT is an assignment that loads a value from a parameter declaration,
1025 return the index of the parameter in ipa_node_params which has not been
1026 modified. Otherwise return -1. */
1027
1028 static int
1029 load_from_unmodified_param (struct func_body_info *fbi,
1030 vec<ipa_param_descriptor> descriptors,
1031 gimple stmt)
1032 {
1033 int index;
1034 tree op1;
1035
1036 if (!gimple_assign_single_p (stmt))
1037 return -1;
1038
1039 op1 = gimple_assign_rhs1 (stmt);
1040 if (TREE_CODE (op1) != PARM_DECL)
1041 return -1;
1042
1043 index = ipa_get_param_decl_index_1 (descriptors, op1);
1044 if (index < 0
1045 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1046 return -1;
1047
1048 return index;
1049 }
1050
1051 /* Return true if memory reference REF (which must be a load through parameter
1052 with INDEX) loads data that are known to be unmodified in this function
1053 before reaching statement STMT. */
1054
1055 static bool
1056 parm_ref_data_preserved_p (struct func_body_info *fbi,
1057 int index, gimple stmt, tree ref)
1058 {
1059 struct param_aa_status *paa;
1060 bool modified = false;
1061 ao_ref refd;
1062
1063 /* FIXME: FBI can be NULL if we are being called from outside
1064 ipa_node_analysis or ipcp_transform_function, which currently happens
1065 during inlining analysis. It would be great to extend fbi's lifetime and
1066 always have it. Currently, we are just not afraid of too much walking in
1067 that case. */
1068 if (fbi)
1069 {
1070 if (aa_overwalked (fbi))
1071 return false;
1072 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1073 if (paa->ref_modified)
1074 return false;
1075 }
1076 else
1077 paa = NULL;
1078
1079 gcc_checking_assert (gimple_vuse (stmt));
1080 ao_ref_init (&refd, ref);
1081 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1082 &modified, NULL);
1083 if (fbi)
1084 fbi->aa_walked += walked;
1085 if (paa && modified)
1086 paa->ref_modified = true;
1087 return !modified;
1088 }
1089
1090 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1091 is known to be unmodified in this function before reaching call statement
1092 CALL into which it is passed. FBI describes the function body. */
1093
1094 static bool
1095 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
1096 gimple call, tree parm)
1097 {
1098 bool modified = false;
1099 ao_ref refd;
1100
1101 /* It's unnecessary to calculate anything about memory contents for a const
1102 function because it is not going to use it. But do not cache the result
1103 either. Also, no such calculations for non-pointers. */
1104 if (!gimple_vuse (call)
1105 || !POINTER_TYPE_P (TREE_TYPE (parm))
1106 || aa_overwalked (fbi))
1107 return false;
1108
1109 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1110 index);
1111 if (paa->pt_modified)
1112 return false;
1113
1114 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1115 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1116 &modified, NULL);
1117 fbi->aa_walked += walked;
1118 if (modified)
1119 paa->pt_modified = true;
1120 return !modified;
1121 }
1122
1123 /* Return true if we can prove that OP is a memory reference loading unmodified
1124 data from an aggregate passed as a parameter and if the aggregate is passed
1125 by reference, that the alias type of the load corresponds to the type of the
1126 formal parameter (so that we can rely on this type for TBAA in callers).
1127 INFO and PARMS_AINFO describe parameters of the current function (but the
1128 latter can be NULL), STMT is the load statement. If function returns true,
1129 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1130 within the aggregate and whether it is a load from a value passed by
1131 reference respectively. */
1132
1133 static bool
1134 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1135 vec<ipa_param_descriptor> descriptors,
1136 gimple stmt, tree op, int *index_p,
1137 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1138 bool *by_ref_p)
1139 {
1140 int index;
1141 HOST_WIDE_INT size, max_size;
1142 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1143
1144 if (max_size == -1 || max_size != size || *offset_p < 0)
1145 return false;
1146
1147 if (DECL_P (base))
1148 {
1149 int index = ipa_get_param_decl_index_1 (descriptors, base);
1150 if (index >= 0
1151 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1152 {
1153 *index_p = index;
1154 *by_ref_p = false;
1155 if (size_p)
1156 *size_p = size;
1157 return true;
1158 }
1159 return false;
1160 }
1161
1162 if (TREE_CODE (base) != MEM_REF
1163 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1164 || !integer_zerop (TREE_OPERAND (base, 1)))
1165 return false;
1166
1167 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1168 {
1169 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1170 index = ipa_get_param_decl_index_1 (descriptors, parm);
1171 }
1172 else
1173 {
1174 /* This branch catches situations where a pointer parameter is not a
1175 gimple register, for example:
1176
1177 void hip7(S*) (struct S * p)
1178 {
1179 void (*<T2e4>) (struct S *) D.1867;
1180 struct S * p.1;
1181
1182 <bb 2>:
1183 p.1_1 = p;
1184 D.1867_2 = p.1_1->f;
1185 D.1867_2 ();
1186 gdp = &p;
1187 */
1188
1189 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1190 index = load_from_unmodified_param (fbi, descriptors, def);
1191 }
1192
1193 if (index >= 0
1194 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1195 {
1196 *index_p = index;
1197 *by_ref_p = true;
1198 if (size_p)
1199 *size_p = size;
1200 return true;
1201 }
1202 return false;
1203 }
1204
1205 /* Just like the previous function, but without the func_body_info
1206 pointer, for users outside of this file. */
1207
1208 bool
1209 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1210 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1211 bool *by_ref_p)
1212 {
1213 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1214 offset_p, NULL, by_ref_p);
1215 }
1216
1217 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1218 of an assignment statement STMT, try to determine whether we are actually
1219 handling any of the following cases and construct an appropriate jump
1220 function into JFUNC if so:
1221
1222 1) The passed value is loaded from a formal parameter which is not a gimple
1223 register (most probably because it is addressable, the value has to be
1224 scalar) and we can guarantee the value has not changed. This case can
1225 therefore be described by a simple pass-through jump function. For example:
1226
1227 foo (int a)
1228 {
1229 int a.0;
1230
1231 a.0_2 = a;
1232 bar (a.0_2);
1233
1234 2) The passed value can be described by a simple arithmetic pass-through
1235 jump function. E.g.
1236
1237 foo (int a)
1238 {
1239 int D.2064;
1240
1241 D.2064_4 = a.1(D) + 4;
1242 bar (D.2064_4);
1243
1244 This case can also occur in combination of the previous one, e.g.:
1245
1246 foo (int a, int z)
1247 {
1248 int a.0;
1249 int D.2064;
1250
1251 a.0_3 = a;
1252 D.2064_4 = a.0_3 + 4;
1253 foo (D.2064_4);
1254
1255 3) The passed value is an address of an object within another one (which
1256 also passed by reference). Such situations are described by an ancestor
1257 jump function and describe situations such as:
1258
1259 B::foo() (struct B * const this)
1260 {
1261 struct A * D.1845;
1262
1263 D.1845_2 = &this_1(D)->D.1748;
1264 A::bar (D.1845_2);
1265
1266 INFO is the structure describing individual parameters that is accessed in
1267 different stages of IPA optimizations. FBI contains the information that is
1268 only needed for intraprocedural analysis. */
1269
1270 static void
1271 compute_complex_assign_jump_func (struct func_body_info *fbi,
1272 struct ipa_node_params *info,
1273 struct ipa_jump_func *jfunc,
1274 gimple call, gimple stmt, tree name,
1275 tree param_type)
1276 {
1277 HOST_WIDE_INT offset, size, max_size;
1278 tree op1, tc_ssa, base, ssa;
1279 int index;
1280
1281 op1 = gimple_assign_rhs1 (stmt);
1282
1283 if (TREE_CODE (op1) == SSA_NAME)
1284 {
1285 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1286 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1287 else
1288 index = load_from_unmodified_param (fbi, info->descriptors,
1289 SSA_NAME_DEF_STMT (op1));
1290 tc_ssa = op1;
1291 }
1292 else
1293 {
1294 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1295 tc_ssa = gimple_assign_lhs (stmt);
1296 }
1297
1298 if (index >= 0)
1299 {
1300 tree op2 = gimple_assign_rhs2 (stmt);
1301
1302 if (op2)
1303 {
1304 if (!is_gimple_ip_invariant (op2)
1305 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1306 && !useless_type_conversion_p (TREE_TYPE (name),
1307 TREE_TYPE (op1))))
1308 return;
1309
1310 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1311 gimple_assign_rhs_code (stmt));
1312 }
1313 else if (gimple_assign_single_p (stmt))
1314 {
1315 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1316 bool type_p = false;
1317
1318 if (param_type && POINTER_TYPE_P (param_type))
1319 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1320 call, jfunc);
1321 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1322 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1323 }
1324 return;
1325 }
1326
1327 if (TREE_CODE (op1) != ADDR_EXPR)
1328 return;
1329 op1 = TREE_OPERAND (op1, 0);
1330 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1331 return;
1332 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1333 if (TREE_CODE (base) != MEM_REF
1334 /* If this is a varying address, punt. */
1335 || max_size == -1
1336 || max_size != size)
1337 return;
1338 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1339 ssa = TREE_OPERAND (base, 0);
1340 if (TREE_CODE (ssa) != SSA_NAME
1341 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1342 || offset < 0)
1343 return;
1344
1345 /* Dynamic types are changed in constructors and destructors. */
1346 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1347 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1348 {
1349 bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
1350 && !detect_type_change (op1, base, TREE_TYPE (param_type),
1351 call, jfunc, offset));
1352 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1353 ipa_set_ancestor_jf (jfunc, offset,
1354 type_p ? TREE_TYPE (param_type) : NULL, index,
1355 parm_ref_data_pass_through_p (fbi, index,
1356 call, ssa), type_p);
1357 }
1358 }
1359
1360 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1361 it looks like:
1362
1363 iftmp.1_3 = &obj_2(D)->D.1762;
1364
1365 The base of the MEM_REF must be a default definition SSA NAME of a
1366 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1367 whole MEM_REF expression is returned and the offset calculated from any
1368 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1369 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1370
1371 static tree
1372 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1373 {
1374 HOST_WIDE_INT size, max_size;
1375 tree expr, parm, obj;
1376
1377 if (!gimple_assign_single_p (assign))
1378 return NULL_TREE;
1379 expr = gimple_assign_rhs1 (assign);
1380
1381 if (TREE_CODE (expr) != ADDR_EXPR)
1382 return NULL_TREE;
1383 expr = TREE_OPERAND (expr, 0);
1384 obj = expr;
1385 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1386
1387 if (TREE_CODE (expr) != MEM_REF
1388 /* If this is a varying address, punt. */
1389 || max_size == -1
1390 || max_size != size
1391 || *offset < 0)
1392 return NULL_TREE;
1393 parm = TREE_OPERAND (expr, 0);
1394 if (TREE_CODE (parm) != SSA_NAME
1395 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1396 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1397 return NULL_TREE;
1398
1399 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1400 *obj_p = obj;
1401 return expr;
1402 }
1403
1404
1405 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1406 statement PHI, try to find out whether NAME is in fact a
1407 multiple-inheritance typecast from a descendant into an ancestor of a formal
1408 parameter and thus can be described by an ancestor jump function and if so,
1409 write the appropriate function into JFUNC.
1410
1411 Essentially we want to match the following pattern:
1412
1413 if (obj_2(D) != 0B)
1414 goto <bb 3>;
1415 else
1416 goto <bb 4>;
1417
1418 <bb 3>:
1419 iftmp.1_3 = &obj_2(D)->D.1762;
1420
1421 <bb 4>:
1422 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1423 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1424 return D.1879_6; */
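
/* A source-level sketch (hypothetical, for illustration only) that commonly
   produces the pattern above under multiple inheritance:

      struct A { virtual int f (); };
      struct Pad { int x; };
      struct B : Pad, A { };

      extern int middleman (A *, int);

      int
      caller (B *obj, int i)
      {
	return middleman (obj, i);	// implicit B* -> A* conversion
      }

   Because A sits at a non-zero offset inside B, the pointer conversion is
   compiled as "obj != 0B ? &obj->D.xxxx : 0B", which is exactly the guarded
   PHI this function looks for.  */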
1425
1426 static void
1427 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1428 struct ipa_node_params *info,
1429 struct ipa_jump_func *jfunc,
1430 gimple call, gimple phi, tree param_type)
1431 {
1432 HOST_WIDE_INT offset;
1433 gimple assign, cond;
1434 basic_block phi_bb, assign_bb, cond_bb;
1435 tree tmp, parm, expr, obj;
1436 int index, i;
1437
1438 if (gimple_phi_num_args (phi) != 2)
1439 return;
1440
1441 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1442 tmp = PHI_ARG_DEF (phi, 0);
1443 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1444 tmp = PHI_ARG_DEF (phi, 1);
1445 else
1446 return;
1447 if (TREE_CODE (tmp) != SSA_NAME
1448 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1449 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1450 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1451 return;
1452
1453 assign = SSA_NAME_DEF_STMT (tmp);
1454 assign_bb = gimple_bb (assign);
1455 if (!single_pred_p (assign_bb))
1456 return;
1457 expr = get_ancestor_addr_info (assign, &obj, &offset);
1458 if (!expr)
1459 return;
1460 parm = TREE_OPERAND (expr, 0);
1461 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1462 if (index < 0)
1463 return;
1464
1465 cond_bb = single_pred (assign_bb);
1466 cond = last_stmt (cond_bb);
1467 if (!cond
1468 || gimple_code (cond) != GIMPLE_COND
1469 || gimple_cond_code (cond) != NE_EXPR
1470 || gimple_cond_lhs (cond) != parm
1471 || !integer_zerop (gimple_cond_rhs (cond)))
1472 return;
1473
1474 phi_bb = gimple_bb (phi);
1475 for (i = 0; i < 2; i++)
1476 {
1477 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1478 if (pred != assign_bb && pred != cond_bb)
1479 return;
1480 }
1481
1482 bool type_p = false;
1483 if (param_type && POINTER_TYPE_P (param_type)
1484 && contains_polymorphic_type_p (TREE_TYPE (param_type)))
1485 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1486 call, jfunc, offset);
1487 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1488 ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
1489 index,
1490 parm_ref_data_pass_through_p (fbi, index, call, parm),
1491 type_p);
1492 }
1493
1494 /* Given OP which is passed as an actual argument to a called function,
1495 determine if it is possible to construct a KNOWN_TYPE jump function for it
1496 and if so, create one and store it to JFUNC.
1497 EXPECTED_TYPE represents the type the argument should be in. */
1498
1499 static void
1500 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1501 gimple call, tree expected_type)
1502 {
1503 HOST_WIDE_INT offset, size, max_size;
1504 tree base;
1505
1506 if (!flag_devirtualize
1507 || TREE_CODE (op) != ADDR_EXPR
1508 || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
1509 /* Be sure expected_type is polymorphic. */
1510 || !expected_type
1511 || !contains_polymorphic_type_p (expected_type))
1512 return;
1513
1514 op = TREE_OPERAND (op, 0);
1515 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1516 if (!DECL_P (base)
1517 || max_size == -1
1518 || max_size != size
1519 || !contains_polymorphic_type_p (TREE_TYPE (base)))
1520 return;
1521
1522 if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
1523 call, current_function_decl)
1524 /* Even if the var seems to be in construction by inline call stack,
1525 we may work out the actual type by walking memory writes. */
1526 && (!is_global_var (base)
1527 && detect_type_change (op, base, expected_type, call, jfunc, offset)))
1528 return;
1529
1530 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1531 expected_type);
1532 }
1533
1534 /* Inspect the given TYPE and return true iff it has the same structure (the
1535 same number of fields of the same types) as a C++ member pointer. If
1536 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1537 corresponding fields there. */
1538
1539 static bool
1540 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1541 {
1542 tree fld;
1543
1544 if (TREE_CODE (type) != RECORD_TYPE)
1545 return false;
1546
1547 fld = TYPE_FIELDS (type);
1548 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1549 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1550 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1551 return false;
1552
1553 if (method_ptr)
1554 *method_ptr = fld;
1555
1556 fld = DECL_CHAIN (fld);
1557 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1558 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1559 return false;
1560 if (delta)
1561 *delta = fld;
1562
1563 if (DECL_CHAIN (fld))
1564 return false;
1565
1566 return true;
1567 }
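
/* For illustration (an assumption based on the common Itanium C++ ABI
   lowering, not something this file spells out): a pointer to member
   function such as

      struct S { int f (int); };
      int (S::*pmf) (int) = &S::f;

   is typically represented as a record of two fields, roughly

      struct
      {
	void (*__pfn) ();	// the method, or an encoded vtable index
	long __delta;		// adjustment to apply to the THIS pointer
      };

   which is the shape the predicate above tests for.  */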
1568
1569 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1570 return the rhs of its defining statement. Otherwise return RHS as it
1571 is. */
1572
1573 static inline tree
1574 get_ssa_def_if_simple_copy (tree rhs)
1575 {
1576 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1577 {
1578 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1579
1580 if (gimple_assign_single_p (def_stmt))
1581 rhs = gimple_assign_rhs1 (def_stmt);
1582 else
1583 break;
1584 }
1585 return rhs;
1586 }
1587
1588 /* Simple linked list, describing known contents of an aggregate before a
1589 call. */
1590
1591 struct ipa_known_agg_contents_list
1592 {
1593 /* Offset and size of the described part of the aggregate. */
1594 HOST_WIDE_INT offset, size;
1595 /* Known constant value or NULL if the contents are known to be unknown. */
1596 tree constant;
1597 /* Pointer to the next structure in the list. */
1598 struct ipa_known_agg_contents_list *next;
1599 };
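
/* A hypothetical example (not from the original sources) of what the list
   captures: before a call such as

      struct S { int a; long b; };
      extern void callee (struct S *);

      void
      caller (void)
      {
	struct S s;
	s.a = 1;
	s.b = 2;
	callee (&s);
      }

   the backwards walk over the stores would build two entries sorted by
   offset, one at offset 0 holding the constant 1 and one at the offset of
   field b holding the constant 2.  */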
1600
1601 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1602 structures at which to put a new one with the given LHS_OFFSET and LHS_SIZE,
1603 unless there is a partial overlap, in which case return NULL, or such an
1604 element is already there, in which case set *ALREADY_THERE to true. */
1605
1606 static struct ipa_known_agg_contents_list **
1607 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1608 HOST_WIDE_INT lhs_offset,
1609 HOST_WIDE_INT lhs_size,
1610 bool *already_there)
1611 {
1612 struct ipa_known_agg_contents_list **p = list;
1613 while (*p && (*p)->offset < lhs_offset)
1614 {
1615 if ((*p)->offset + (*p)->size > lhs_offset)
1616 return NULL;
1617 p = &(*p)->next;
1618 }
1619
1620 if (*p && (*p)->offset < lhs_offset + lhs_size)
1621 {
1622 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1623 /* We already know this value is subsequently overwritten with
1624 something else. */
1625 *already_there = true;
1626 else
1627 /* Otherwise this is a partial overlap which we cannot
1628 represent. */
1629 return NULL;
1630 }
1631 return p;
1632 }
1633
1634 /* Build aggregate jump function from LIST, assuming there are exactly
1635 CONST_COUNT constant entries there and that the offset of the passed argument
1636 is ARG_OFFSET and store it into JFUNC. */
1637
1638 static void
1639 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1640 int const_count, HOST_WIDE_INT arg_offset,
1641 struct ipa_jump_func *jfunc)
1642 {
1643 vec_alloc (jfunc->agg.items, const_count);
1644 while (list)
1645 {
1646 if (list->constant)
1647 {
1648 struct ipa_agg_jf_item item;
1649 item.offset = list->offset - arg_offset;
1650 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1651 item.value = unshare_expr_without_location (list->constant);
1652 jfunc->agg.items->quick_push (item);
1653 }
1654 list = list->next;
1655 }
1656 }
1657
1658 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1659 in ARG is filled in with constant values. ARG can either be an aggregate
1660 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1661 aggregate. JFUNC is the jump function into which the constants are
1662 subsequently stored. */
1663
1664 static void
1665 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1666 struct ipa_jump_func *jfunc)
1667 {
1668 struct ipa_known_agg_contents_list *list = NULL;
1669 int item_count = 0, const_count = 0;
1670 HOST_WIDE_INT arg_offset, arg_size;
1671 gimple_stmt_iterator gsi;
1672 tree arg_base;
1673 bool check_ref, by_ref;
1674 ao_ref r;
1675
1676 /* The function operates in three stages. First, we prepare check_ref, r,
1677 arg_base and arg_offset based on what is actually passed as the
1678 argument. */
1679
1680 if (POINTER_TYPE_P (arg_type))
1681 {
1682 by_ref = true;
1683 if (TREE_CODE (arg) == SSA_NAME)
1684 {
1685 tree type_size;
1686 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1687 return;
1688 check_ref = true;
1689 arg_base = arg;
1690 arg_offset = 0;
1691 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1692 arg_size = tree_to_uhwi (type_size);
1693 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1694 }
1695 else if (TREE_CODE (arg) == ADDR_EXPR)
1696 {
1697 HOST_WIDE_INT arg_max_size;
1698
1699 arg = TREE_OPERAND (arg, 0);
1700 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1701 &arg_max_size);
1702 if (arg_max_size == -1
1703 || arg_max_size != arg_size
1704 || arg_offset < 0)
1705 return;
1706 if (DECL_P (arg_base))
1707 {
1708 check_ref = false;
1709 ao_ref_init (&r, arg_base);
1710 }
1711 else
1712 return;
1713 }
1714 else
1715 return;
1716 }
1717 else
1718 {
1719 HOST_WIDE_INT arg_max_size;
1720
1721 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1722
1723 by_ref = false;
1724 check_ref = false;
1725 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1726 &arg_max_size);
1727 if (arg_max_size == -1
1728 || arg_max_size != arg_size
1729 || arg_offset < 0)
1730 return;
1731
1732 ao_ref_init (&r, arg);
1733 }
1734
1735 /* Second stage walks back the BB, looks at individual statements and as long
1736 as it is confident of how the statements affect contents of the
1737 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1738 structures describing it. */
1739 gsi = gsi_for_stmt (call);
1740 gsi_prev (&gsi);
1741 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1742 {
1743 struct ipa_known_agg_contents_list *n, **p;
1744 gimple stmt = gsi_stmt (gsi);
1745 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1746 tree lhs, rhs, lhs_base;
1747
1748 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1749 continue;
1750 if (!gimple_assign_single_p (stmt))
1751 break;
1752
1753 lhs = gimple_assign_lhs (stmt);
1754 rhs = gimple_assign_rhs1 (stmt);
1755 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1756 || TREE_CODE (lhs) == BIT_FIELD_REF
1757 || contains_bitfld_component_ref_p (lhs))
1758 break;
1759
1760 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1761 &lhs_max_size);
1762 if (lhs_max_size == -1
1763 || lhs_max_size != lhs_size)
1764 break;
1765
1766 if (check_ref)
1767 {
1768 if (TREE_CODE (lhs_base) != MEM_REF
1769 || TREE_OPERAND (lhs_base, 0) != arg_base
1770 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1771 break;
1772 }
1773 else if (lhs_base != arg_base)
1774 {
1775 if (DECL_P (lhs_base))
1776 continue;
1777 else
1778 break;
1779 }
1780
1781 bool already_there = false;
1782 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1783 &already_there);
1784 if (!p)
1785 break;
1786 if (already_there)
1787 continue;
1788
1789 rhs = get_ssa_def_if_simple_copy (rhs);
1790 n = XALLOCA (struct ipa_known_agg_contents_list);
1791 n->size = lhs_size;
1792 n->offset = lhs_offset;
1793 if (is_gimple_ip_invariant (rhs))
1794 {
1795 n->constant = rhs;
1796 const_count++;
1797 }
1798 else
1799 n->constant = NULL_TREE;
1800 n->next = *p;
1801 *p = n;
1802
1803 item_count++;
1804 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1805 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1806 break;
1807 }
1808
1809 /* Third stage just goes over the list and creates an appropriate vector of
1810      ipa_agg_jf_item structures out of it, of course only if there are
1811 any known constants to begin with. */
1812
1813 if (const_count)
1814 {
1815 jfunc->agg.by_ref = by_ref;
1816 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1817 }
1818 }
1819
1820 static tree
1821 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1822 {
1823 int n;
1824 tree type = (e->callee
1825 ? TREE_TYPE (e->callee->decl)
1826 : gimple_call_fntype (e->call_stmt));
1827 tree t = TYPE_ARG_TYPES (type);
1828
1829 for (n = 0; n < i; n++)
1830 {
1831 if (!t)
1832 break;
1833 t = TREE_CHAIN (t);
1834 }
1835 if (t)
1836 return TREE_VALUE (t);
1837 if (!e->callee)
1838 return NULL;
1839 t = DECL_ARGUMENTS (e->callee->decl);
1840 for (n = 0; n < i; n++)
1841 {
1842 if (!t)
1843 return NULL;
1844 t = TREE_CHAIN (t);
1845 }
1846 if (t)
1847 return TREE_TYPE (t);
1848 return NULL;
1849 }
1850
1851 /* Compute jump function for all arguments of callsite CS and insert the
1852 information in the jump_functions array in the ipa_edge_args corresponding
1853 to this callsite. */
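/* For a hypothetical call such as

     foo (i, 7, i + 1);

   where I is a formal parameter of the caller, the first argument would
   typically yield a simple pass-through jump function, the second a
   constant jump function holding 7 and the third an arithmetic
   pass-through (PLUS_EXPR with operand 1), subject to the checks
   performed below.  */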
1854
1855 static void
1856 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1857 struct cgraph_edge *cs)
1858 {
1859 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1860 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1861 gimple call = cs->call_stmt;
1862 int n, arg_num = gimple_call_num_args (call);
1863
1864 if (arg_num == 0 || args->jump_functions)
1865 return;
1866 vec_safe_grow_cleared (args->jump_functions, arg_num);
1867
1868 if (gimple_call_internal_p (call))
1869 return;
1870 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1871 return;
1872
1873 for (n = 0; n < arg_num; n++)
1874 {
1875 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1876 tree arg = gimple_call_arg (call, n);
1877 tree param_type = ipa_get_callee_param_type (cs, n);
1878
1879 if (is_gimple_ip_invariant (arg))
1880 ipa_set_jf_constant (jfunc, arg, cs);
1881 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1882 && TREE_CODE (arg) == PARM_DECL)
1883 {
1884 int index = ipa_get_param_decl_index (info, arg);
1885
1886 	  gcc_assert (index >= 0);
1887 /* Aggregate passed by value, check for pass-through, otherwise we
1888 will attempt to fill in aggregate contents later in this
1889 	     for loop.  */
1890 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1891 {
1892 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1893 continue;
1894 }
1895 }
1896 else if (TREE_CODE (arg) == SSA_NAME)
1897 {
1898 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1899 {
1900 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1901 if (index >= 0)
1902 {
1903 bool agg_p, type_p;
1904 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1905 if (param_type && POINTER_TYPE_P (param_type))
1906 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1907 call, jfunc);
1908 else
1909 type_p = false;
1910 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1911 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1912 type_p);
1913 }
1914 }
1915 else
1916 {
1917 gimple stmt = SSA_NAME_DEF_STMT (arg);
1918 if (is_gimple_assign (stmt))
1919 compute_complex_assign_jump_func (fbi, info, jfunc,
1920 call, stmt, arg, param_type);
1921 else if (gimple_code (stmt) == GIMPLE_PHI)
1922 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1923 call, stmt, param_type);
1924 }
1925 }
1926 else
1927 compute_known_type_jump_func (arg, jfunc, call,
1928 param_type
1929 && POINTER_TYPE_P (param_type)
1930 ? TREE_TYPE (param_type)
1931 : NULL);
1932
1933       /* If ARG is a pointer, we cannot use its type to determine the type of the
1934 	 aggregate passed (because type conversions are ignored in gimple).  Usually
1935 	 we can safely get the type from the function declaration, but for K&R
1936 	 prototypes or variadic functions we can try our luck with the type of the
1937 	 pointer passed.  TODO: Since we look for actual initialization of the memory
1938 	 object, we may do better to work out the type from the stores we find.  */
1939 if (!param_type)
1940 param_type = TREE_TYPE (arg);
1941
1942 if ((jfunc->type != IPA_JF_PASS_THROUGH
1943 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1944 && (jfunc->type != IPA_JF_ANCESTOR
1945 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1946 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1947 || POINTER_TYPE_P (param_type)))
1948 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1949 }
1950 }
1951
1952 /* Compute jump functions for all edges - both direct and indirect - outgoing
1953 from BB. */
1954
1955 static void
1956 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1957 {
1958 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1959 int i;
1960 struct cgraph_edge *cs;
1961
1962 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1963 {
1964 struct cgraph_node *callee = cs->callee;
1965
1966 if (callee)
1967 {
1968 cgraph_function_or_thunk_node (callee, NULL);
1969 /* We do not need to bother analyzing calls to unknown functions
1970 unless they may become known during lto/whopr. */
1971 if (!callee->definition && !flag_lto)
1972 continue;
1973 }
1974 ipa_compute_jump_functions_for_edge (fbi, cs);
1975 }
1976 }
1977
1978 /* If STMT looks like a statement loading a value from a member pointer formal
1979 parameter, return that parameter and store the offset of the field to
1980 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1981 might be clobbered). If USE_DELTA, then we look for a use of the delta
1982 field rather than the pfn. */
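/* Under the Itanium C++ ABI a member function pointer is roughly a record

     struct ptrmemfunc { void (*__pfn) (void); ptrdiff_t __delta; };

   so the loads recognized here read one of the two fields from a
   PARM_DECL of such a type, either through a COMPONENT_REF or through a
   MEM_REF with a matching offset.  This sketch is only illustrative; the
   exact layout is target-defined.  */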
1983
1984 static tree
1985 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1986 HOST_WIDE_INT *offset_p)
1987 {
1988 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1989
1990 if (!gimple_assign_single_p (stmt))
1991 return NULL_TREE;
1992
1993 rhs = gimple_assign_rhs1 (stmt);
1994 if (TREE_CODE (rhs) == COMPONENT_REF)
1995 {
1996 ref_field = TREE_OPERAND (rhs, 1);
1997 rhs = TREE_OPERAND (rhs, 0);
1998 }
1999 else
2000 ref_field = NULL_TREE;
2001 if (TREE_CODE (rhs) != MEM_REF)
2002 return NULL_TREE;
2003 rec = TREE_OPERAND (rhs, 0);
2004 if (TREE_CODE (rec) != ADDR_EXPR)
2005 return NULL_TREE;
2006 rec = TREE_OPERAND (rec, 0);
2007 if (TREE_CODE (rec) != PARM_DECL
2008 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2009 return NULL_TREE;
2010 ref_offset = TREE_OPERAND (rhs, 1);
2011
2012 if (use_delta)
2013 fld = delta_field;
2014 else
2015 fld = ptr_field;
2016 if (offset_p)
2017 *offset_p = int_bit_position (fld);
2018
2019 if (ref_field)
2020 {
2021 if (integer_nonzerop (ref_offset))
2022 return NULL_TREE;
2023 return ref_field == fld ? rec : NULL_TREE;
2024 }
2025 else
2026 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2027 : NULL_TREE;
2028 }
2029
2030 /* Returns true iff T is an SSA_NAME defined by a statement. */
2031
2032 static bool
2033 ipa_is_ssa_with_stmt_def (tree t)
2034 {
2035 if (TREE_CODE (t) == SSA_NAME
2036 && !SSA_NAME_IS_DEFAULT_DEF (t))
2037 return true;
2038 else
2039 return false;
2040 }
2041
2042 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2043 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2044 indirect call graph edge. */
2045
2046 static struct cgraph_edge *
2047 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2048 {
2049 struct cgraph_edge *cs;
2050
2051 cs = cgraph_edge (node, stmt);
2052 cs->indirect_info->param_index = param_index;
2053 cs->indirect_info->agg_contents = 0;
2054 cs->indirect_info->member_ptr = 0;
2055 return cs;
2056 }
2057
2058 /* Analyze the indirect CALL and examine uses of formal parameters of the
2059    caller FBI->node (described by FBI->info).  Currently it checks
2061 whether the call calls a pointer that is a formal parameter and if so, the
2062 parameter is marked with the called flag and an indirect call graph edge
2063 describing the call is created. This is very simple for ordinary pointers
2064 represented in SSA but not-so-nice when it comes to member pointers. The
2065 ugly part of this function does nothing more than trying to match the
2066 pattern of such a call. An example of such a pattern is the gimple dump
2067 below, the call is on the last line:
2068
2069 <bb 2>:
2070 f$__delta_5 = f.__delta;
2071 f$__pfn_24 = f.__pfn;
2072
2073 or
2074 <bb 2>:
2075 f$__delta_5 = MEM[(struct *)&f];
2076 f$__pfn_24 = MEM[(struct *)&f + 4B];
2077
2078 and a few lines below:
2079
2080 <bb 5>
2081 D.2496_3 = (int) f$__pfn_24;
2082 D.2497_4 = D.2496_3 & 1;
2083 if (D.2497_4 != 0)
2084 goto <bb 3>;
2085 else
2086 goto <bb 4>;
2087
2088 <bb 6>:
2089 D.2500_7 = (unsigned int) f$__delta_5;
2090 D.2501_8 = &S + D.2500_7;
2091 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2092 D.2503_10 = *D.2502_9;
2093 D.2504_12 = f$__pfn_24 + -1;
2094 D.2505_13 = (unsigned int) D.2504_12;
2095 D.2506_14 = D.2503_10 + D.2505_13;
2096 D.2507_15 = *D.2506_14;
2097 iftmp.11_16 = (String:: *) D.2507_15;
2098
2099 <bb 7>:
2100 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2101 D.2500_19 = (unsigned int) f$__delta_5;
2102 D.2508_20 = &S + D.2500_19;
2103 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2104
2105 Such patterns are results of simple calls to a member pointer:
2106
2107 int doprinting (int (MyString::* f)(int) const)
2108 {
2109 MyString S ("somestring");
2110
2111 return (S.*f)(4);
2112 }
2113
2114 Moreover, the function also looks for called pointers loaded from aggregates
2115 passed by value or reference. */
2116
2117 static void
2118 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2119 tree target)
2120 {
2121 struct ipa_node_params *info = fbi->info;
2122 HOST_WIDE_INT offset;
2123 bool by_ref;
2124
2125 if (SSA_NAME_IS_DEFAULT_DEF (target))
2126 {
2127 tree var = SSA_NAME_VAR (target);
2128 int index = ipa_get_param_decl_index (info, var);
2129 if (index >= 0)
2130 ipa_note_param_call (fbi->node, index, call);
2131 return;
2132 }
2133
2134 int index;
2135 gimple def = SSA_NAME_DEF_STMT (target);
2136 if (gimple_assign_single_p (def)
2137 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2138 gimple_assign_rhs1 (def), &index, &offset,
2139 NULL, &by_ref))
2140 {
2141 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2142 if (cs->indirect_info->offset != offset)
2143 cs->indirect_info->outer_type = NULL;
2144 cs->indirect_info->offset = offset;
2145 cs->indirect_info->agg_contents = 1;
2146 cs->indirect_info->by_ref = by_ref;
2147 return;
2148 }
2149
2150 /* Now we need to try to match the complex pattern of calling a member
2151 pointer. */
2152 if (gimple_code (def) != GIMPLE_PHI
2153 || gimple_phi_num_args (def) != 2
2154 || !POINTER_TYPE_P (TREE_TYPE (target))
2155 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2156 return;
2157
2158 /* First, we need to check whether one of these is a load from a member
2159 pointer that is a parameter to this function. */
2160 tree n1 = PHI_ARG_DEF (def, 0);
2161 tree n2 = PHI_ARG_DEF (def, 1);
2162 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2163 return;
2164 gimple d1 = SSA_NAME_DEF_STMT (n1);
2165 gimple d2 = SSA_NAME_DEF_STMT (n2);
2166
2167 tree rec;
2168 basic_block bb, virt_bb;
2169 basic_block join = gimple_bb (def);
2170 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2171 {
2172 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2173 return;
2174
2175 bb = EDGE_PRED (join, 0)->src;
2176 virt_bb = gimple_bb (d2);
2177 }
2178 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2179 {
2180 bb = EDGE_PRED (join, 1)->src;
2181 virt_bb = gimple_bb (d1);
2182 }
2183 else
2184 return;
2185
2186 /* Second, we need to check that the basic blocks are laid out in the way
2187 corresponding to the pattern. */
2188
2189 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2190 || single_pred (virt_bb) != bb
2191 || single_succ (virt_bb) != join)
2192 return;
2193
2194 /* Third, let's see that the branching is done depending on the least
2195 significant bit of the pfn. */
2196
2197 gimple branch = last_stmt (bb);
2198 if (!branch || gimple_code (branch) != GIMPLE_COND)
2199 return;
2200
2201 if ((gimple_cond_code (branch) != NE_EXPR
2202 && gimple_cond_code (branch) != EQ_EXPR)
2203 || !integer_zerop (gimple_cond_rhs (branch)))
2204 return;
2205
2206 tree cond = gimple_cond_lhs (branch);
2207 if (!ipa_is_ssa_with_stmt_def (cond))
2208 return;
2209
2210 def = SSA_NAME_DEF_STMT (cond);
2211 if (!is_gimple_assign (def)
2212 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2213 || !integer_onep (gimple_assign_rhs2 (def)))
2214 return;
2215
2216 cond = gimple_assign_rhs1 (def);
2217 if (!ipa_is_ssa_with_stmt_def (cond))
2218 return;
2219
2220 def = SSA_NAME_DEF_STMT (cond);
2221
2222 if (is_gimple_assign (def)
2223 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2224 {
2225 cond = gimple_assign_rhs1 (def);
2226 if (!ipa_is_ssa_with_stmt_def (cond))
2227 return;
2228 def = SSA_NAME_DEF_STMT (cond);
2229 }
2230
2231 tree rec2;
2232 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2233 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2234 == ptrmemfunc_vbit_in_delta),
2235 NULL);
2236 if (rec != rec2)
2237 return;
2238
2239 index = ipa_get_param_decl_index (info, rec);
2240 if (index >= 0
2241 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2242 {
2243 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2244 if (cs->indirect_info->offset != offset)
2245 cs->indirect_info->outer_type = NULL;
2246 cs->indirect_info->offset = offset;
2247 cs->indirect_info->agg_contents = 1;
2248 cs->indirect_info->member_ptr = 1;
2249 }
2250
2251 return;
2252 }
2253
2254 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2255 object referenced in the expression is a formal parameter of the caller
2256 FBI->node (described by FBI->info), create a call note for the
2257 statement. */
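/* A hypothetical source-level example of the situation handled here:

     struct A { virtual void f (); };
     void g (struct A *a) { a->f (); }

   The OBJ_TYPE_REF object is the formal parameter A (or an ancestor
   expression based on it), so an indirect call note with the appropriate
   offset and OTR token is attached to the edge.  */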
2258
2259 static void
2260 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2261 gimple call, tree target)
2262 {
2263 tree obj = OBJ_TYPE_REF_OBJECT (target);
2264 int index;
2265 HOST_WIDE_INT anc_offset;
2266
2267 if (!flag_devirtualize)
2268 return;
2269
2270 if (TREE_CODE (obj) != SSA_NAME)
2271 return;
2272
2273 struct ipa_node_params *info = fbi->info;
2274 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2275 {
2276 struct ipa_jump_func jfunc;
2277 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2278 return;
2279
2280 anc_offset = 0;
2281 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2282 gcc_assert (index >= 0);
2283 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2284 call, &jfunc))
2285 return;
2286 }
2287 else
2288 {
2289 struct ipa_jump_func jfunc;
2290 gimple stmt = SSA_NAME_DEF_STMT (obj);
2291 tree expr;
2292
2293 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2294 if (!expr)
2295 return;
2296 index = ipa_get_param_decl_index (info,
2297 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2298 gcc_assert (index >= 0);
2299 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2300 call, &jfunc, anc_offset))
2301 return;
2302 }
2303
2304 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2305 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2306 ii->offset = anc_offset;
2307 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2308 ii->otr_type = obj_type_ref_class (target);
2309 ii->polymorphic = 1;
2310 }
2311
2312 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2313    of the caller (described by FBI->info).  */
2315
2316 static void
2317 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2318 {
2319 tree target = gimple_call_fn (call);
2320
2321 if (!target
2322 || (TREE_CODE (target) != SSA_NAME
2323 && !virtual_method_call_p (target)))
2324 return;
2325
2326 /* If we previously turned the call into a direct call, there is
2327      no need to analyze it.  */
2328 struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
2329 if (cs && !cs->indirect_unknown_callee)
2330 return;
2331 if (TREE_CODE (target) == SSA_NAME)
2332 ipa_analyze_indirect_call_uses (fbi, call, target);
2333 else if (virtual_method_call_p (target))
2334 ipa_analyze_virtual_call_uses (fbi, call, target);
2335 }
2336
2337
2338 /* Analyze the call statement STMT with respect to formal parameters (described
2339    in FBI->info) of the caller given by FBI->node.  Currently it only checks
2340    whether formal parameters are called.  */
2341
2342 static void
2343 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2344 {
2345 if (is_gimple_call (stmt))
2346 ipa_analyze_call_uses (fbi, stmt);
2347 }
2348
2349 /* Callback of walk_stmt_load_store_addr_ops, used for visiting loads, stores
2350    and address operands alike.  If OP is a parameter declaration, mark it as
2351    used in the info structure passed in DATA.  */
2352
2353 static bool
2354 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2355 {
2356 struct ipa_node_params *info = (struct ipa_node_params *) data;
2357
2358 op = get_base_address (op);
2359 if (op
2360 && TREE_CODE (op) == PARM_DECL)
2361 {
2362 int index = ipa_get_param_decl_index (info, op);
2363 gcc_assert (index >= 0);
2364 ipa_set_param_used (info, index, true);
2365 }
2366
2367 return false;
2368 }
2369
2370 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2371 the findings in various structures of the associated ipa_node_params
2372 structure, such as parameter flags, notes etc. FBI holds various data about
2373 the function being analyzed. */
2374
2375 static void
2376 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2377 {
2378 gimple_stmt_iterator gsi;
2379 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2380 {
2381 gimple stmt = gsi_stmt (gsi);
2382
2383 if (is_gimple_debug (stmt))
2384 continue;
2385
2386 ipa_analyze_stmt_uses (fbi, stmt);
2387 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2388 visit_ref_for_mod_analysis,
2389 visit_ref_for_mod_analysis,
2390 visit_ref_for_mod_analysis);
2391 }
2392 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2393 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2394 visit_ref_for_mod_analysis,
2395 visit_ref_for_mod_analysis,
2396 visit_ref_for_mod_analysis);
2397 }
2398
2399 /* Calculate controlled uses of parameters of NODE. */
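/* For instance, in a hypothetical function

     static void f (void (*cb) (void)) { cb (); cb (); }

   CB appears only as the callee of two call statements, giving it a
   controlled use count of 2, whereas any other kind of non-debug use (a
   store of the pointer, arithmetic on it, etc.) would force the count to
   IPA_UNDESCRIBED_USE.  */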
2400
2401 static void
2402 ipa_analyze_controlled_uses (struct cgraph_node *node)
2403 {
2404 struct ipa_node_params *info = IPA_NODE_REF (node);
2405
2406 for (int i = 0; i < ipa_get_param_count (info); i++)
2407 {
2408 tree parm = ipa_get_param (info, i);
2409 int controlled_uses = 0;
2410
2411       /* For SSA regs see if the parameter is used.  For non-SSA we compute
2412 	 the flag during the modification analysis.  */
2413 if (is_gimple_reg (parm))
2414 {
2415 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2416 parm);
2417 if (ddef && !has_zero_uses (ddef))
2418 {
2419 imm_use_iterator imm_iter;
2420 use_operand_p use_p;
2421
2422 ipa_set_param_used (info, i, true);
2423 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2424 if (!is_gimple_call (USE_STMT (use_p)))
2425 {
2426 if (!is_gimple_debug (USE_STMT (use_p)))
2427 {
2428 controlled_uses = IPA_UNDESCRIBED_USE;
2429 break;
2430 }
2431 }
2432 else
2433 controlled_uses++;
2434 }
2435 else
2436 controlled_uses = 0;
2437 }
2438 else
2439 controlled_uses = IPA_UNDESCRIBED_USE;
2440 ipa_set_controlled_uses (info, i, controlled_uses);
2441 }
2442 }
2443
2444 /* Free stuff in BI. */
2445
2446 static void
2447 free_ipa_bb_info (struct ipa_bb_info *bi)
2448 {
2449 bi->cg_edges.release ();
2450 bi->param_aa_statuses.release ();
2451 }
2452
2453 /* Dominator walker driving the analysis. */
2454
2455 class analysis_dom_walker : public dom_walker
2456 {
2457 public:
2458 analysis_dom_walker (struct func_body_info *fbi)
2459 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2460
2461 virtual void before_dom_children (basic_block);
2462
2463 private:
2464 struct func_body_info *m_fbi;
2465 };
2466
2467 void
2468 analysis_dom_walker::before_dom_children (basic_block bb)
2469 {
2470 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2471 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2472 }
2473
2474 /* Initialize the array describing properties of formal parameters
2475 of NODE, analyze their uses and compute jump functions associated
2476 with actual arguments of calls from within NODE. */
2477
2478 void
2479 ipa_analyze_node (struct cgraph_node *node)
2480 {
2481 struct func_body_info fbi;
2482 struct ipa_node_params *info;
2483
2484 ipa_check_create_node_params ();
2485 ipa_check_create_edge_args ();
2486 info = IPA_NODE_REF (node);
2487
2488 if (info->analysis_done)
2489 return;
2490 info->analysis_done = 1;
2491
2492 if (ipa_func_spec_opts_forbid_analysis_p (node))
2493 {
2494 for (int i = 0; i < ipa_get_param_count (info); i++)
2495 {
2496 ipa_set_param_used (info, i, true);
2497 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2498 }
2499 return;
2500 }
2501
2502 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2503 push_cfun (func);
2504 calculate_dominance_info (CDI_DOMINATORS);
2505 ipa_initialize_node_params (node);
2506 ipa_analyze_controlled_uses (node);
2507
2508 fbi.node = node;
2509 fbi.info = IPA_NODE_REF (node);
2510 fbi.bb_infos = vNULL;
2511 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2512 fbi.param_count = ipa_get_param_count (info);
2513 fbi.aa_walked = 0;
2514
2515 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2516 {
2517 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2518 bi->cg_edges.safe_push (cs);
2519 }
2520
2521 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2522 {
2523 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2524 bi->cg_edges.safe_push (cs);
2525 }
2526
2527 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2528
2529 int i;
2530 struct ipa_bb_info *bi;
2531 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2532 free_ipa_bb_info (bi);
2533 fbi.bb_infos.release ();
2534 free_dominance_info (CDI_DOMINATORS);
2535 pop_cfun ();
2536 }
2537
2538 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2539 attempt a type-based devirtualization. If successful, return the
2540 target function declaration, otherwise return NULL. */
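/* A hypothetical fragment illustrating the intent:

     struct A { virtual int f () { return 1; } };
     int g () { struct A a; return a.f (); }

   Here the dynamic type of A is known at the call site, so if earlier
   folding has not already devirtualized the call, the known type jump
   function computed below identifies A::f as the target.  */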
2541
2542 tree
2543 ipa_intraprocedural_devirtualization (gimple call)
2544 {
2545 tree binfo, token, fndecl;
2546 struct ipa_jump_func jfunc;
2547 tree otr = gimple_call_fn (call);
2548
2549 jfunc.type = IPA_JF_UNKNOWN;
2550 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2551 call, obj_type_ref_class (otr));
2552 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2553 return NULL_TREE;
2554 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2555 if (!binfo)
2556 return NULL_TREE;
2557 token = OBJ_TYPE_REF_TOKEN (otr);
2558 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2559 binfo);
2560 #ifdef ENABLE_CHECKING
2561 if (fndecl)
2562 gcc_assert (possible_polymorphic_call_target_p
2563 (otr, cgraph_get_node (fndecl)));
2564 #endif
2565 return fndecl;
2566 }
2567
2568 /* Update the jump function DST when the call graph edge corresponding to SRC
2569    is being inlined, knowing that DST is of type ancestor and SRC of known
2570    type.  */
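/* A schematic example: if SRC describes a known type at offset 32 and DST
   an ancestor at offset 64 with type T, the result is a known type jump
   function at combined offset 96 whose component type is T, provided DST
   preserved the type.  */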
2571
2572 static void
2573 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2574 struct ipa_jump_func *dst)
2575 {
2576 HOST_WIDE_INT combined_offset;
2577 tree combined_type;
2578
2579 if (!ipa_get_jf_ancestor_type_preserved (dst))
2580 {
2581 dst->type = IPA_JF_UNKNOWN;
2582 return;
2583 }
2584
2585 combined_offset = ipa_get_jf_known_type_offset (src)
2586 + ipa_get_jf_ancestor_offset (dst);
2587 combined_type = ipa_get_jf_ancestor_type (dst);
2588
2589 ipa_set_jf_known_type (dst, combined_offset,
2590 ipa_get_jf_known_type_base_type (src),
2591 combined_type);
2592 }
2593
2594 /* Update the jump functions associated with call graph edge E when the call
2595 graph edge CS is being inlined, assuming that E->caller is already (possibly
2596 indirectly) inlined into CS->callee and that E has not been inlined. */
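/* Schematically, if E passed along the first formal parameter of
   CS->callee via a simple pass-through and CS supplied the constant 5 in
   that position, the updated jump function of E becomes the constant 5;
   conversely, an unknown or incompatible source degrades the result to
   IPA_JF_UNKNOWN.  */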
2597
2598 static void
2599 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2600 struct cgraph_edge *e)
2601 {
2602 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2603 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2604 int count = ipa_get_cs_argument_count (args);
2605 int i;
2606
2607 for (i = 0; i < count; i++)
2608 {
2609 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2610
2611 if (dst->type == IPA_JF_ANCESTOR)
2612 {
2613 struct ipa_jump_func *src;
2614 int dst_fid = dst->value.ancestor.formal_id;
2615
2616 	  /* A variable number of arguments can cause havoc if we try to access
2617 one that does not exist in the inlined edge. So make sure we
2618 don't. */
2619 if (dst_fid >= ipa_get_cs_argument_count (top))
2620 {
2621 dst->type = IPA_JF_UNKNOWN;
2622 continue;
2623 }
2624
2625 src = ipa_get_ith_jump_func (top, dst_fid);
2626
2627 if (src->agg.items
2628 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2629 {
2630 struct ipa_agg_jf_item *item;
2631 int j;
2632
2633 /* Currently we do not produce clobber aggregate jump functions,
2634 replace with merging when we do. */
2635 gcc_assert (!dst->agg.items);
2636
2637 dst->agg.items = vec_safe_copy (src->agg.items);
2638 dst->agg.by_ref = src->agg.by_ref;
2639 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2640 item->offset -= dst->value.ancestor.offset;
2641 }
2642
2643 if (src->type == IPA_JF_KNOWN_TYPE)
2644 combine_known_type_and_ancestor_jfs (src, dst);
2645 else if (src->type == IPA_JF_PASS_THROUGH
2646 && src->value.pass_through.operation == NOP_EXPR)
2647 {
2648 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2649 dst->value.ancestor.agg_preserved &=
2650 src->value.pass_through.agg_preserved;
2651 dst->value.ancestor.type_preserved &=
2652 src->value.pass_through.type_preserved;
2653 }
2654 else if (src->type == IPA_JF_ANCESTOR)
2655 {
2656 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2657 dst->value.ancestor.offset += src->value.ancestor.offset;
2658 dst->value.ancestor.agg_preserved &=
2659 src->value.ancestor.agg_preserved;
2660 dst->value.ancestor.type_preserved &=
2661 src->value.ancestor.type_preserved;
2662 }
2663 else
2664 dst->type = IPA_JF_UNKNOWN;
2665 }
2666 else if (dst->type == IPA_JF_PASS_THROUGH)
2667 {
2668 struct ipa_jump_func *src;
2669 	  /* We must check the range due to calls with a variable number of arguments
2670 and we cannot combine jump functions with operations. */
2671 if (dst->value.pass_through.operation == NOP_EXPR
2672 && (dst->value.pass_through.formal_id
2673 < ipa_get_cs_argument_count (top)))
2674 {
2675 int dst_fid = dst->value.pass_through.formal_id;
2676 src = ipa_get_ith_jump_func (top, dst_fid);
2677 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2678
2679 switch (src->type)
2680 {
2681 case IPA_JF_UNKNOWN:
2682 dst->type = IPA_JF_UNKNOWN;
2683 break;
2684 case IPA_JF_KNOWN_TYPE:
2685 if (ipa_get_jf_pass_through_type_preserved (dst))
2686 ipa_set_jf_known_type (dst,
2687 ipa_get_jf_known_type_offset (src),
2688 ipa_get_jf_known_type_base_type (src),
2689 ipa_get_jf_known_type_component_type (src));
2690 else
2691 dst->type = IPA_JF_UNKNOWN;
2692 break;
2693 case IPA_JF_CONST:
2694 ipa_set_jf_cst_copy (dst, src);
2695 break;
2696
2697 case IPA_JF_PASS_THROUGH:
2698 {
2699 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2700 enum tree_code operation;
2701 operation = ipa_get_jf_pass_through_operation (src);
2702
2703 if (operation == NOP_EXPR)
2704 {
2705 bool agg_p, type_p;
2706 agg_p = dst_agg_p
2707 && ipa_get_jf_pass_through_agg_preserved (src);
2708 type_p = ipa_get_jf_pass_through_type_preserved (src)
2709 && ipa_get_jf_pass_through_type_preserved (dst);
2710 ipa_set_jf_simple_pass_through (dst, formal_id,
2711 agg_p, type_p);
2712 }
2713 else
2714 {
2715 tree operand = ipa_get_jf_pass_through_operand (src);
2716 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2717 operation);
2718 }
2719 break;
2720 }
2721 case IPA_JF_ANCESTOR:
2722 {
2723 bool agg_p, type_p;
2724 agg_p = dst_agg_p
2725 && ipa_get_jf_ancestor_agg_preserved (src);
2726 type_p = ipa_get_jf_ancestor_type_preserved (src)
2727 && ipa_get_jf_pass_through_type_preserved (dst);
2728 ipa_set_ancestor_jf (dst,
2729 ipa_get_jf_ancestor_offset (src),
2730 ipa_get_jf_ancestor_type (src),
2731 ipa_get_jf_ancestor_formal_id (src),
2732 agg_p, type_p);
2733 break;
2734 }
2735 default:
2736 gcc_unreachable ();
2737 }
2738
2739 if (src->agg.items
2740 && (dst_agg_p || !src->agg.by_ref))
2741 {
2742 /* Currently we do not produce clobber aggregate jump
2743 functions, replace with merging when we do. */
2744 gcc_assert (!dst->agg.items);
2745
2746 dst->agg.by_ref = src->agg.by_ref;
2747 dst->agg.items = vec_safe_copy (src->agg.items);
2748 }
2749 }
2750 else
2751 dst->type = IPA_JF_UNKNOWN;
2752 }
2753 }
2754 }
2755
2756 /* If TARGET is an addr_expr of a function declaration, make it the destination
2757 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2758
2759 struct cgraph_edge *
2760 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2761 {
2762 struct cgraph_node *callee;
2763 struct inline_edge_summary *es = inline_edge_summary (ie);
2764 bool unreachable = false;
2765
2766 if (TREE_CODE (target) == ADDR_EXPR)
2767 target = TREE_OPERAND (target, 0);
2768 if (TREE_CODE (target) != FUNCTION_DECL)
2769 {
2770 target = canonicalize_constructor_val (target, NULL);
2771 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2772 {
2773 if (ie->indirect_info->member_ptr)
2774 /* Member pointer call that goes through a VMT lookup. */
2775 return NULL;
2776
2777 if (dump_enabled_p ())
2778 {
2779 location_t loc = gimple_location_safe (ie->call_stmt);
2780 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2781 "discovered direct call to non-function in %s/%i, "
2782 "making it __builtin_unreachable\n",
2783 ie->caller->name (), ie->caller->order);
2784 }
2785
2786 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2787 callee = cgraph_get_create_node (target);
2788 unreachable = true;
2789 }
2790 else
2791 callee = cgraph_get_node (target);
2792 }
2793 else
2794 callee = cgraph_get_node (target);
2795
2796   /* Because may-edges are not explicitly represented and the vtable may be
2797      external, we may create the first reference to the object in the unit.  */
2798 if (!callee || callee->global.inlined_to)
2799 {
2800
2801       /* We had better ensure that we can refer to it.
2802 	 In the case of static functions we are out of luck, since we have
2803 	 already removed the body.  In the case of public functions we may or may
2804 not introduce the reference. */
2805 if (!canonicalize_constructor_val (target, NULL)
2806 || !TREE_PUBLIC (target))
2807 {
2808 if (dump_file)
2809 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2810 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2811 xstrdup (ie->caller->name ()),
2812 ie->caller->order,
2813 xstrdup (ie->callee->name ()),
2814 ie->callee->order);
2815 return NULL;
2816 }
2817 callee = cgraph_get_create_node (target);
2818 }
2819
2820 if (!dbg_cnt (devirt))
2821 return NULL;
2822
2823 ipa_check_create_node_params ();
2824
2825   /* We cannot make edges to inline clones.  It is a bug if someone removed
2826      the cgraph node too early.  */
2827 gcc_assert (!callee->global.inlined_to);
2828
2829 if (dump_file && !unreachable)
2830 {
2831 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2832 "(%s/%i -> %s/%i), for stmt ",
2833 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2834 xstrdup (ie->caller->name ()),
2835 ie->caller->order,
2836 xstrdup (callee->name ()),
2837 callee->order);
2838 if (ie->call_stmt)
2839 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2840 else
2841 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2842 }
2843 if (dump_enabled_p ())
2844 {
2845 location_t loc = gimple_location_safe (ie->call_stmt);
2846
2847 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2848 "converting indirect call in %s to direct call to %s\n",
2849 ie->caller->name (), callee->name ());
2850 }
2851 ie = cgraph_make_edge_direct (ie, callee);
2852 es = inline_edge_summary (ie);
2853 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2854 - eni_size_weights.call_cost);
2855 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2856 - eni_time_weights.call_cost);
2857
2858 return ie;
2859 }
2860
2861 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2862    return NULL if there is none.  BY_REF specifies whether the value has to
2863 be passed by reference or by value. */
2864
2865 tree
2866 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2867 HOST_WIDE_INT offset, bool by_ref)
2868 {
2869 struct ipa_agg_jf_item *item;
2870 int i;
2871
2872 if (by_ref != agg->by_ref)
2873 return NULL;
2874
2875 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2876 if (item->offset == offset)
2877 {
2878 /* Currently we do not have clobber values, return NULL for them once
2879 we do. */
2880 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2881 return item->value;
2882 }
2883 return NULL;
2884 }
2885
2886 /* Remove a reference to SYMBOL from the list of references of a node given by
2887 reference description RDESC. Return true if the reference has been
2888 successfully found and removed. */
2889
2890 static bool
2891 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2892 {
2893 struct ipa_ref *to_del;
2894 struct cgraph_edge *origin;
2895
2896 origin = rdesc->cs;
2897 if (!origin)
2898 return false;
2899 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2900 origin->lto_stmt_uid);
2901 if (!to_del)
2902 return false;
2903
2904 to_del->remove_reference ();
2905 if (dump_file)
2906 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2907 xstrdup (origin->caller->name ()),
2908 origin->caller->order, xstrdup (symbol->name ()));
2909 return true;
2910 }
2911
2912 /* If JFUNC has a reference description with refcount different from
2913 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2914 NULL. JFUNC must be a constant jump function. */
2915
2916 static struct ipa_cst_ref_desc *
2917 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2918 {
2919 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2920 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2921 return rdesc;
2922 else
2923 return NULL;
2924 }
2925
2926 /* If the value of constant jump function JFUNC is an address of a function
2927 declaration, return the associated call graph node. Otherwise return
2928 NULL. */
2929
2930 static cgraph_node *
2931 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2932 {
2933 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2934 tree cst = ipa_get_jf_constant (jfunc);
2935 if (TREE_CODE (cst) != ADDR_EXPR
2936 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2937 return NULL;
2938
2939 return cgraph_get_node (TREE_OPERAND (cst, 0));
2940 }
2941
2942
2943 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2944 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2945 the edge specified in the rdesc. Return false if either the symbol or the
2946 reference could not be found, otherwise return true. */
2947
2948 static bool
2949 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2950 {
2951 struct ipa_cst_ref_desc *rdesc;
2952 if (jfunc->type == IPA_JF_CONST
2953 && (rdesc = jfunc_rdesc_usable (jfunc))
2954 && --rdesc->refcount == 0)
2955 {
2956 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2957 if (!symbol)
2958 return false;
2959
2960 return remove_described_reference (symbol, rdesc);
2961 }
2962 return true;
2963 }
2964
2965 /* Try to find a destination for indirect edge IE that corresponds to a simple
2966 call or a call of a member function pointer and where the destination is a
2967 pointer formal parameter described by jump function JFUNC. If it can be
2968 determined, return the newly direct edge, otherwise return NULL.
2969 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
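/* As a hypothetical example, if the caller of IE received &my_callback
   (a constant jump function) in the parameter that IE calls through,
   the value lookup below yields that address and the edge is turned
   into a direct call to MY_CALLBACK.  */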
2970
2971 static struct cgraph_edge *
2972 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2973 struct ipa_jump_func *jfunc,
2974 struct ipa_node_params *new_root_info)
2975 {
2976 struct cgraph_edge *cs;
2977 tree target;
2978 bool agg_contents = ie->indirect_info->agg_contents;
2979
2980 if (ie->indirect_info->agg_contents)
2981 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2982 ie->indirect_info->offset,
2983 ie->indirect_info->by_ref);
2984 else
2985 target = ipa_value_from_jfunc (new_root_info, jfunc);
2986 if (!target)
2987 return NULL;
2988 cs = ipa_make_edge_direct_to_target (ie, target);
2989
2990 if (cs && !agg_contents)
2991 {
2992 bool ok;
2993 gcc_checking_assert (cs->callee
2994 && (cs != ie
2995 || jfunc->type != IPA_JF_CONST
2996 || !cgraph_node_for_jfunc (jfunc)
2997 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2998 ok = try_decrement_rdesc_refcount (jfunc);
2999 gcc_checking_assert (ok);
3000 }
3001
3002 return cs;
3003 }
3004
3005 /* Return the target to be used in cases of impossible devirtualization. IE
3006 and target (the latter can be NULL) are dumped when dumping is enabled. */
3007
3008 tree
3009 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3010 {
3011 if (dump_file)
3012 {
3013 if (target)
3014 fprintf (dump_file,
3015 "Type inconsistent devirtualization: %s/%i->%s\n",
3016 ie->caller->name (), ie->caller->order,
3017 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3018 else
3019 fprintf (dump_file,
3020 "No devirtualization target in %s/%i\n",
3021 ie->caller->name (), ie->caller->order);
3022 }
3023 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3024 cgraph_get_create_node (new_target);
3025 return new_target;
3026 }
3027
3028 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3029 call based on a formal parameter which is described by jump function JFUNC
3030 and if it can be determined, make it direct and return the direct edge.
3031 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3032 are relative to. */
3033
3034 static struct cgraph_edge *
3035 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3036 struct ipa_jump_func *jfunc,
3037 struct ipa_node_params *new_root_info)
3038 {
3039 tree binfo, target;
3040
3041 if (!flag_devirtualize)
3042 return NULL;
3043
3044 /* First try to do lookup via known virtual table pointer value. */
3045 if (!ie->indirect_info->by_ref)
3046 {
3047 tree vtable;
3048 unsigned HOST_WIDE_INT offset;
3049 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3050 ie->indirect_info->offset,
3051 true);
3052 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3053 {
3054 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3055 vtable, offset);
3056 if (target)
3057 {
3058 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
3059 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
3060 || !possible_polymorphic_call_target_p
3061 (ie, cgraph_get_node (target)))
3062 target = ipa_impossible_devirt_target (ie, target);
3063 return ipa_make_edge_direct_to_target (ie, target);
3064 }
3065 }
3066 }
3067
3068 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3069
3070 if (!binfo)
3071 return NULL;
3072
3073 if (TREE_CODE (binfo) != TREE_BINFO)
3074 {
3075 ipa_polymorphic_call_context context;
3076 vec <cgraph_node *>targets;
3077 bool final;
3078
3079 if (!get_polymorphic_call_info_from_invariant
3080 (&context, binfo, ie->indirect_info->otr_type,
3081 ie->indirect_info->offset))
3082 return NULL;
3083 targets = possible_polymorphic_call_targets
3084 (ie->indirect_info->otr_type,
3085 ie->indirect_info->otr_token,
3086 context, &final);
3087 if (!final || targets.length () > 1)
3088 return NULL;
3089 if (targets.length () == 1)
3090 target = targets[0]->decl;
3091 else
3092 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3093 }
3094 else
3095 {
3096 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3097 ie->indirect_info->otr_type);
3098 if (binfo)
3099 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3100 binfo);
3101 else
3102 return NULL;
3103 }
3104
3105 if (target)
3106 {
3107 if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
3108 target = ipa_impossible_devirt_target (ie, target);
3109 return ipa_make_edge_direct_to_target (ie, target);
3110 }
3111 else
3112 return NULL;
3113 }
3114
3115 /* Update the param called notes associated with NODE when CS is being inlined,
3116 assuming NODE is (potentially indirectly) inlined into CS->callee.
3117 Moreover, if the callee is discovered to be constant, create a new cgraph
3118 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3119    unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
3120
3121 static bool
3122 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3123 struct cgraph_node *node,
3124 vec<cgraph_edge_p> *new_edges)
3125 {
3126 struct ipa_edge_args *top;
3127 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3128 struct ipa_node_params *new_root_info;
3129 bool res = false;
3130
3131 ipa_check_create_edge_args ();
3132 top = IPA_EDGE_REF (cs);
3133 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3134 ? cs->caller->global.inlined_to
3135 : cs->caller);
3136
3137 for (ie = node->indirect_calls; ie; ie = next_ie)
3138 {
3139 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3140 struct ipa_jump_func *jfunc;
3141 int param_index;
3142
3143 next_ie = ie->next_callee;
3144
3145 if (ici->param_index == -1)
3146 continue;
3147
3148       /* We must check the range due to calls with a variable number of arguments:  */
3149 if (ici->param_index >= ipa_get_cs_argument_count (top))
3150 {
3151 ici->param_index = -1;
3152 continue;
3153 }
3154
3155 param_index = ici->param_index;
3156 jfunc = ipa_get_ith_jump_func (top, param_index);
3157
3158 if (!flag_indirect_inlining)
3159 new_direct_edge = NULL;
3160 else if (ici->polymorphic)
3161 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3162 new_root_info);
3163 else
3164 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3165 new_root_info);
3166 /* If speculation was removed, then we need to do nothing. */
3167 if (new_direct_edge && new_direct_edge != ie)
3168 {
3169 new_direct_edge->indirect_inlining_edge = 1;
3170 top = IPA_EDGE_REF (cs);
3171 res = true;
3172 }
3173 else if (new_direct_edge)
3174 {
3175 new_direct_edge->indirect_inlining_edge = 1;
3176 if (new_direct_edge->call_stmt)
3177 new_direct_edge->call_stmt_cannot_inline_p
3178 = !gimple_check_call_matching_types (
3179 new_direct_edge->call_stmt,
3180 new_direct_edge->callee->decl, false);
3181 if (new_edges)
3182 {
3183 new_edges->safe_push (new_direct_edge);
3184 res = true;
3185 }
3186 top = IPA_EDGE_REF (cs);
3187 }
3188 else if (jfunc->type == IPA_JF_PASS_THROUGH
3189 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3190 {
3191 if ((ici->agg_contents
3192 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3193 || (ici->polymorphic
3194 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3195 ici->param_index = -1;
3196 else
3197 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3198 }
3199 else if (jfunc->type == IPA_JF_ANCESTOR)
3200 {
3201 if ((ici->agg_contents
3202 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3203 || (ici->polymorphic
3204 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3205 ici->param_index = -1;
3206 else
3207 {
3208 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3209 if (ipa_get_jf_ancestor_offset (jfunc))
3210 ici->outer_type = NULL;
3211 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3212 }
3213 }
3214 else
3215 /* Either we can find a destination for this edge now or never. */
3216 ici->param_index = -1;
3217 }
3218
3219 return res;
3220 }
3221
3222 /* Recursively traverse subtree of NODE (including node) made of inlined
3223 cgraph_edges when CS has been inlined and invoke
3224 update_indirect_edges_after_inlining on all nodes and
3225 update_jump_functions_after_inlining on all non-inlined edges that lead out
3226 of this subtree. Newly discovered indirect edges will be added to
3227    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3228 created. */
3229
3230 static bool
3231 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3232 struct cgraph_node *node,
3233 vec<cgraph_edge_p> *new_edges)
3234 {
3235 struct cgraph_edge *e;
3236 bool res;
3237
3238 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3239
3240 for (e = node->callees; e; e = e->next_callee)
3241 if (!e->inline_failed)
3242 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3243 else
3244 update_jump_functions_after_inlining (cs, e);
3245 for (e = node->indirect_calls; e; e = e->next_callee)
3246 update_jump_functions_after_inlining (cs, e);
3247
3248 return res;
3249 }
3250
3251 /* Combine two controlled uses counts as done during inlining. */
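/* The use at the inlined call site itself disappears, hence the minus
   one: e.g. combining counts of 2 and 3 yields 2 + 3 - 1 = 4 described
   uses, while IPA_UNDESCRIBED_USE on either side makes the result
   undescribed as well.  */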
3252
3253 static int
3254 combine_controlled_uses_counters (int c, int d)
3255 {
3256 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3257 return IPA_UNDESCRIBED_USE;
3258 else
3259 return c + d - 1;
3260 }
3261
3262 /* Propagate number of controlled users from CS->callee to the new root of the
3263 tree of inlined nodes. */
3264
3265 static void
3266 propagate_controlled_uses (struct cgraph_edge *cs)
3267 {
3268 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3269 struct cgraph_node *new_root = cs->caller->global.inlined_to
3270 ? cs->caller->global.inlined_to : cs->caller;
3271 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3272 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3273 int count, i;
3274
3275 count = MIN (ipa_get_cs_argument_count (args),
3276 ipa_get_param_count (old_root_info));
3277 for (i = 0; i < count; i++)
3278 {
3279 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3280 struct ipa_cst_ref_desc *rdesc;
3281
3282 if (jf->type == IPA_JF_PASS_THROUGH)
3283 {
3284 int src_idx, c, d;
3285 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3286 c = ipa_get_controlled_uses (new_root_info, src_idx);
3287 d = ipa_get_controlled_uses (old_root_info, i);
3288
3289 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3290 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3291 c = combine_controlled_uses_counters (c, d);
3292 ipa_set_controlled_uses (new_root_info, src_idx, c);
3293 if (c == 0 && new_root_info->ipcp_orig_node)
3294 {
3295 struct cgraph_node *n;
3296 struct ipa_ref *ref;
3297 tree t = new_root_info->known_vals[src_idx];
3298
3299 if (t && TREE_CODE (t) == ADDR_EXPR
3300 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3301 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
3302 && (ref = new_root->find_reference (n, NULL, 0)))
3303 {
3304 if (dump_file)
3305 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3306 "reference from %s/%i to %s/%i.\n",
3307 xstrdup (new_root->name ()),
3308 new_root->order,
3309 xstrdup (n->name ()), n->order);
3310 ref->remove_reference ();
3311 }
3312 }
3313 }
3314 else if (jf->type == IPA_JF_CONST
3315 && (rdesc = jfunc_rdesc_usable (jf)))
3316 {
3317 int d = ipa_get_controlled_uses (old_root_info, i);
3318 int c = rdesc->refcount;
3319 rdesc->refcount = combine_controlled_uses_counters (c, d);
3320 if (rdesc->refcount == 0)
3321 {
3322 tree cst = ipa_get_jf_constant (jf);
3323 struct cgraph_node *n;
3324 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3325 && TREE_CODE (TREE_OPERAND (cst, 0))
3326 == FUNCTION_DECL);
3327 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3328 if (n)
3329 {
3330 struct cgraph_node *clone;
3331 bool ok;
3332 ok = remove_described_reference (n, rdesc);
3333 gcc_checking_assert (ok);
3334
3335 clone = cs->caller;
3336 while (clone->global.inlined_to
3337 && clone != rdesc->cs->caller
3338 && IPA_NODE_REF (clone)->ipcp_orig_node)
3339 {
3340 struct ipa_ref *ref;
3341 ref = clone->find_reference (n, NULL, 0);
3342 if (ref)
3343 {
3344 if (dump_file)
3345 fprintf (dump_file, "ipa-prop: Removing "
3346 "cloning-created reference "
3347 "from %s/%i to %s/%i.\n",
3348 xstrdup (clone->name ()),
3349 clone->order,
3350 xstrdup (n->name ()),
3351 n->order);
3352 ref->remove_reference ();
3353 }
3354 clone = clone->callers->caller;
3355 }
3356 }
3357 }
3358 }
3359 }
3360
3361 for (i = ipa_get_param_count (old_root_info);
3362 i < ipa_get_cs_argument_count (args);
3363 i++)
3364 {
3365 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3366
3367 if (jf->type == IPA_JF_CONST)
3368 {
3369 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3370 if (rdesc)
3371 rdesc->refcount = IPA_UNDESCRIBED_USE;
3372 }
3373 else if (jf->type == IPA_JF_PASS_THROUGH)
3374 ipa_set_controlled_uses (new_root_info,
3375 jf->value.pass_through.formal_id,
3376 IPA_UNDESCRIBED_USE);
3377 }
3378 }
3379
3380 /* Update jump functions and call note functions on inlining the call site CS.
3381 CS is expected to lead to a node already cloned by
3382 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3383    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3384    created.  */
3385
3386 bool
3387 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3388 vec<cgraph_edge_p> *new_edges)
3389 {
3390 bool changed;
3391 /* Do nothing if the preparation phase has not been carried out yet
3392 (i.e. during early inlining). */
3393 if (!ipa_node_params_vector.exists ())
3394 return false;
3395 gcc_assert (ipa_edge_args_vector);
3396
3397 propagate_controlled_uses (cs);
3398 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3399
3400 return changed;
3401 }
3402
3403 /* Frees all dynamically allocated structures that the argument info points
3404 to. */
3405
3406 void
3407 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3408 {
3409 vec_free (args->jump_functions);
3410 memset (args, 0, sizeof (*args));
3411 }
3412
3413 /* Free all ipa_edge structures. */
3414
3415 void
3416 ipa_free_all_edge_args (void)
3417 {
3418 int i;
3419 struct ipa_edge_args *args;
3420
3421 if (!ipa_edge_args_vector)
3422 return;
3423
3424 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3425 ipa_free_edge_args_substructures (args);
3426
3427 vec_free (ipa_edge_args_vector);
3428 }
3429
3430 /* Frees all dynamically allocated structures that the param info points
3431 to. */
3432
3433 void
3434 ipa_free_node_params_substructures (struct ipa_node_params *info)
3435 {
3436 info->descriptors.release ();
3437 free (info->lattices);
3438   /* Lattice values and their sources are deallocated with their allocation
3439 pool. */
3440 info->known_vals.release ();
3441 memset (info, 0, sizeof (*info));
3442 }
3443
3444 /* Free all ipa_node_params structures. */
3445
3446 void
3447 ipa_free_all_node_params (void)
3448 {
3449 int i;
3450 struct ipa_node_params *info;
3451
3452 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3453 ipa_free_node_params_substructures (info);
3454
3455 ipa_node_params_vector.release ();
3456 }
3457
3458 /* Set the aggregate replacements of NODE to be AGGVALS. */
3459
3460 void
3461 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3462 struct ipa_agg_replacement_value *aggvals)
3463 {
3464 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3465 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3466
3467 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3468 }
3469
3470 /* Hook that is called by cgraph.c when an edge is removed. */
3471
3472 static void
3473 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3474 {
3475 struct ipa_edge_args *args;
3476
3477 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3478 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3479 return;
3480
3481 args = IPA_EDGE_REF (cs);
3482 if (args->jump_functions)
3483 {
3484 struct ipa_jump_func *jf;
3485 int i;
3486 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3487 {
3488 struct ipa_cst_ref_desc *rdesc;
3489 try_decrement_rdesc_refcount (jf);
3490 if (jf->type == IPA_JF_CONST
3491 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3492 && rdesc->cs == cs)
3493 rdesc->cs = NULL;
3494 }
3495 }
3496
3497 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3498 }
3499
3500 /* Hook that is called by cgraph.c when a node is removed. */
3501
3502 static void
3503 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3504 {
3505   /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
3506 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3507 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3508 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3509 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3510 }
3511
3512 /* Hook that is called by cgraph.c when an edge is duplicated. */
3513
3514 static void
3515 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3516 __attribute__((unused)) void *data)
3517 {
3518 struct ipa_edge_args *old_args, *new_args;
3519 unsigned int i;
3520
3521 ipa_check_create_edge_args ();
3522
3523 old_args = IPA_EDGE_REF (src);
3524 new_args = IPA_EDGE_REF (dst);
3525
3526 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3527
3528 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3529 {
3530 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3531 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3532
3533 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3534
3535 if (src_jf->type == IPA_JF_CONST)
3536 {
3537 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3538
3539 if (!src_rdesc)
3540 dst_jf->value.constant.rdesc = NULL;
3541 else if (src->caller == dst->caller)
3542 {
3543 struct ipa_ref *ref;
3544 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3545 gcc_checking_assert (n);
3546 ref = src->caller->find_reference (n, src->call_stmt,
3547 src->lto_stmt_uid);
3548 gcc_checking_assert (ref);
3549 dst->caller->clone_reference (ref, ref->stmt);
3550
3551 gcc_checking_assert (ipa_refdesc_pool);
3552 struct ipa_cst_ref_desc *dst_rdesc
3553 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3554 dst_rdesc->cs = dst;
3555 dst_rdesc->refcount = src_rdesc->refcount;
3556 dst_rdesc->next_duplicate = NULL;
3557 dst_jf->value.constant.rdesc = dst_rdesc;
3558 }
3559 else if (src_rdesc->cs == src)
3560 {
3561 struct ipa_cst_ref_desc *dst_rdesc;
3562 gcc_checking_assert (ipa_refdesc_pool);
3563 dst_rdesc
3564 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3565 dst_rdesc->cs = dst;
3566 dst_rdesc->refcount = src_rdesc->refcount;
3567 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3568 src_rdesc->next_duplicate = dst_rdesc;
3569 dst_jf->value.constant.rdesc = dst_rdesc;
3570 }
3571 else
3572 {
3573 struct ipa_cst_ref_desc *dst_rdesc;
3574 /* This can happen during inlining, when a JFUNC can refer to a
3575 reference taken in a function up in the tree of inline clones.
3576 We need to find the duplicate that refers to our tree of
3577 inline clones. */
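
/* Illustrative sketch (with hypothetical clone names): if the original
   edge belonged to function A, A was inlined into B, and B is now being
   inlined into D, SRC_RDESC belongs to a different tree of inline clones;
   its next_duplicate chain holds one descriptor per such tree, and the
   loop below picks the one whose cs->caller has the same inlined_to root
   as DST.  */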
3578
3579 gcc_assert (dst->caller->global.inlined_to);
3580 for (dst_rdesc = src_rdesc->next_duplicate;
3581 dst_rdesc;
3582 dst_rdesc = dst_rdesc->next_duplicate)
3583 {
3584 struct cgraph_node *top;
3585 top = dst_rdesc->cs->caller->global.inlined_to
3586 ? dst_rdesc->cs->caller->global.inlined_to
3587 : dst_rdesc->cs->caller;
3588 if (dst->caller->global.inlined_to == top)
3589 break;
3590 }
3591 gcc_assert (dst_rdesc);
3592 dst_jf->value.constant.rdesc = dst_rdesc;
3593 }
3594 }
3595 }
3596 }
3597
3598 /* Hook that is called by cgraph.c when a node is duplicated. */
3599
3600 static void
3601 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3602 ATTRIBUTE_UNUSED void *data)
3603 {
3604 struct ipa_node_params *old_info, *new_info;
3605 struct ipa_agg_replacement_value *old_av, *new_av;
3606
3607 ipa_check_create_node_params ();
3608 old_info = IPA_NODE_REF (src);
3609 new_info = IPA_NODE_REF (dst);
3610
3611 new_info->descriptors = old_info->descriptors.copy ();
3612 new_info->lattices = NULL;
3613 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3614
3615 new_info->analysis_done = old_info->analysis_done;
3616 new_info->node_enqueued = old_info->node_enqueued;
3617
3618 old_av = ipa_get_agg_replacements_for_node (src);
3619 if (!old_av)
3620 return;
3621
3622 new_av = NULL;
3623 while (old_av)
3624 {
3625 struct ipa_agg_replacement_value *v;
3626
3627 v = ggc_alloc<ipa_agg_replacement_value> ();
3628 memcpy (v, old_av, sizeof (*v));
3629 v->next = new_av;
3630 new_av = v;
3631 old_av = old_av->next;
3632 }
3633 ipa_set_node_agg_value_chain (dst, new_av);
3634 }
3635
3636
3637 /* Analyze a function newly added to the callgraph. */
3638
3639 static void
3640 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3641 {
3642 if (cgraph_function_with_gimple_body_p (node))
3643 ipa_analyze_node (node);
3644 }
3645
3646 /* Register our cgraph hooks if they are not already there. */
3647
3648 void
3649 ipa_register_cgraph_hooks (void)
3650 {
3651 if (!edge_removal_hook_holder)
3652 edge_removal_hook_holder =
3653 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3654 if (!node_removal_hook_holder)
3655 node_removal_hook_holder =
3656 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3657 if (!edge_duplication_hook_holder)
3658 edge_duplication_hook_holder =
3659 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3660 if (!node_duplication_hook_holder)
3661 node_duplication_hook_holder =
3662 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3663 function_insertion_hook_holder =
3664 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3665 }
3666
3667 /* Unregister our cgraph hooks. */
3668
3669 static void
3670 ipa_unregister_cgraph_hooks (void)
3671 {
3672 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3673 edge_removal_hook_holder = NULL;
3674 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3675 node_removal_hook_holder = NULL;
3676 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3677 edge_duplication_hook_holder = NULL;
3678 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3679 node_duplication_hook_holder = NULL;
3680 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3681 function_insertion_hook_holder = NULL;
3682 }
3683
3684 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3685 longer needed after ipa-cp. */
3686
3687 void
3688 ipa_free_all_structures_after_ipa_cp (void)
3689 {
3690 if (!optimize)
3691 {
3692 ipa_free_all_edge_args ();
3693 ipa_free_all_node_params ();
3694 free_alloc_pool (ipcp_sources_pool);
3695 free_alloc_pool (ipcp_values_pool);
3696 free_alloc_pool (ipcp_agg_lattice_pool);
3697 ipa_unregister_cgraph_hooks ();
3698 if (ipa_refdesc_pool)
3699 free_alloc_pool (ipa_refdesc_pool);
3700 }
3701 }
3702
3703 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3704 longer needed after indirect inlining. */
3705
3706 void
3707 ipa_free_all_structures_after_iinln (void)
3708 {
3709 ipa_free_all_edge_args ();
3710 ipa_free_all_node_params ();
3711 ipa_unregister_cgraph_hooks ();
3712 if (ipcp_sources_pool)
3713 free_alloc_pool (ipcp_sources_pool);
3714 if (ipcp_values_pool)
3715 free_alloc_pool (ipcp_values_pool);
3716 if (ipcp_agg_lattice_pool)
3717 free_alloc_pool (ipcp_agg_lattice_pool);
3718 if (ipa_refdesc_pool)
3719 free_alloc_pool (ipa_refdesc_pool);
3720 }
3721
3722 /* Print ipa_tree_map data structures of function NODE to F. */
3724
3725 void
3726 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3727 {
3728 int i, count;
3729 struct ipa_node_params *info;
3730
3731 if (!node->definition)
3732 return;
3733 info = IPA_NODE_REF (node);
3734 fprintf (f, " function %s/%i parameter descriptors:\n",
3735 node->name (), node->order);
3736 count = ipa_get_param_count (info);
3737 for (i = 0; i < count; i++)
3738 {
3739 int c;
3740
3741 fprintf (f, " ");
3742 ipa_dump_param (f, info, i);
3743 if (ipa_is_param_used (info, i))
3744 fprintf (f, " used");
3745 c = ipa_get_controlled_uses (info, i);
3746 if (c == IPA_UNDESCRIBED_USE)
3747 fprintf (f, " undescribed_use");
3748 else
3749 fprintf (f, " controlled_uses=%i", c);
3750 fprintf (f, "\n");
3751 }
3752 }
3753
3754 /* Print ipa_tree_map data structures of all functions in the
3755 callgraph to F. */
3756
3757 void
3758 ipa_print_all_params (FILE * f)
3759 {
3760 struct cgraph_node *node;
3761
3762 fprintf (f, "\nFunction parameters:\n");
3763 FOR_EACH_FUNCTION (node)
3764 ipa_print_node_params (f, node);
3765 }
3766
3767 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3768
3769 vec<tree>
3770 ipa_get_vector_of_formal_parms (tree fndecl)
3771 {
3772 vec<tree> args;
3773 int count;
3774 tree parm;
3775
3776 gcc_assert (!flag_wpa);
3777 count = count_formal_params (fndecl);
3778 args.create (count);
3779 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3780 args.quick_push (parm);
3781
3782 return args;
3783 }
3784
3785 /* Return a heap allocated vector containing types of formal parameters of
3786 function type FNTYPE. */
3787
3788 vec<tree>
3789 ipa_get_vector_of_formal_parm_types (tree fntype)
3790 {
3791 vec<tree> types;
3792 int count = 0;
3793 tree t;
3794
3795 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3796 count++;
3797
3798 types.create (count);
3799 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3800 types.quick_push (TREE_VALUE (t));
3801
3802 return types;
3803 }
3804
3805 /* Modify the function declaration FNDECL and its type according to the plan
3806 in ADJUSTMENTS. Also set the base fields of the individual adjustment
3807 structures to the actual parameters being modified, which are determined
3808 by their base_index fields. */
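
/* A minimal sketch of such a plan (hypothetical signature and name, not from
   any testcase): to turn

     void foo (struct S *p, int n)

   into

     void foo (int ISRA.0, int n)

   ADJUSTMENTS would hold two entries: one whose op is neither
   IPA_PARM_OP_COPY nor IPA_PARM_OP_REMOVE, with base_index 0 and type int,
   describing the piece of *P that becomes the new first parameter, and an
   IPA_PARM_OP_COPY with base_index 1 for N.  */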
3809
3810 void
3811 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3812 {
3813 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3814 tree orig_type = TREE_TYPE (fndecl);
3815 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3816
3817 /* The following test is an ugly hack; some functions simply don't have any
3818 arguments in their type. This is probably a bug, but well... */
3819 bool care_for_types = (old_arg_types != NULL_TREE);
3820 bool last_parm_void;
3821 vec<tree> otypes;
3822 if (care_for_types)
3823 {
3824 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3825 == void_type_node);
3826 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3827 if (last_parm_void)
3828 gcc_assert (oparms.length () + 1 == otypes.length ());
3829 else
3830 gcc_assert (oparms.length () == otypes.length ());
3831 }
3832 else
3833 {
3834 last_parm_void = false;
3835 otypes.create (0);
3836 }
3837
3838 int len = adjustments.length ();
3839 tree *link = &DECL_ARGUMENTS (fndecl);
3840 tree new_arg_types = NULL;
3841 for (int i = 0; i < len; i++)
3842 {
3843 struct ipa_parm_adjustment *adj;
3844 gcc_assert (link);
3845
3846 adj = &adjustments[i];
3847 tree parm;
3848 if (adj->op == IPA_PARM_OP_NEW)
3849 parm = NULL;
3850 else
3851 parm = oparms[adj->base_index];
3852 adj->base = parm;
3853
3854 if (adj->op == IPA_PARM_OP_COPY)
3855 {
3856 if (care_for_types)
3857 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3858 new_arg_types);
3859 *link = parm;
3860 link = &DECL_CHAIN (parm);
3861 }
3862 else if (adj->op != IPA_PARM_OP_REMOVE)
3863 {
3864 tree new_parm;
3865 tree ptype;
3866
3867 if (adj->by_ref)
3868 ptype = build_pointer_type (adj->type);
3869 else
3870 {
3871 ptype = adj->type;
3872 if (is_gimple_reg_type (ptype))
3873 {
3874 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3875 if (TYPE_ALIGN (ptype) < malign)
3876 ptype = build_aligned_type (ptype, malign);
3877 }
3878 }
3879
3880 if (care_for_types)
3881 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3882
3883 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3884 ptype);
3885 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3886 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3887 DECL_ARTIFICIAL (new_parm) = 1;
3888 DECL_ARG_TYPE (new_parm) = ptype;
3889 DECL_CONTEXT (new_parm) = fndecl;
3890 TREE_USED (new_parm) = 1;
3891 DECL_IGNORED_P (new_parm) = 1;
3892 layout_decl (new_parm, 0);
3893
3894 if (adj->op == IPA_PARM_OP_NEW)
3895 adj->base = NULL;
3896 else
3897 adj->base = parm;
3898 adj->new_decl = new_parm;
3899
3900 *link = new_parm;
3901 link = &DECL_CHAIN (new_parm);
3902 }
3903 }
3904
3905 *link = NULL_TREE;
3906
3907 tree new_reversed = NULL;
3908 if (care_for_types)
3909 {
3910 new_reversed = nreverse (new_arg_types);
3911 if (last_parm_void)
3912 {
3913 if (new_reversed)
3914 TREE_CHAIN (new_arg_types) = void_list_node;
3915 else
3916 new_reversed = void_list_node;
3917 }
3918 }
3919
3920 /* Use copy_node to preserve as much as possible from the original type
3921 (debug info, attribute lists etc.).
3922 The exception is METHOD_TYPEs, which must have a THIS argument; when
3923 we are asked to remove it, we need to build a new FUNCTION_TYPE
3924 instead. */
3925 tree new_type = NULL;
3926 if (TREE_CODE (orig_type) != METHOD_TYPE
3927 || (adjustments[0].op == IPA_PARM_OP_COPY
3928 && adjustments[0].base_index == 0))
3929 {
3930 new_type = build_distinct_type_copy (orig_type);
3931 TYPE_ARG_TYPES (new_type) = new_reversed;
3932 }
3933 else
3934 {
3935 new_type
3936 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3937 new_reversed));
3938 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3939 DECL_VINDEX (fndecl) = NULL_TREE;
3940 }
3941
3942 /* When signature changes, we need to clear builtin info. */
3943 if (DECL_BUILT_IN (fndecl))
3944 {
3945 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3946 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3947 }
3948
3949 /* This is a new type, not a copy of an old type. Need to reassociate
3950 variants. We can handle everything except the main variant lazily. */
3951 tree t = TYPE_MAIN_VARIANT (orig_type);
3952 if (orig_type != t)
3953 {
3954 TYPE_MAIN_VARIANT (new_type) = t;
3955 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3956 TYPE_NEXT_VARIANT (t) = new_type;
3957 }
3958 else
3959 {
3960 TYPE_MAIN_VARIANT (new_type) = new_type;
3961 TYPE_NEXT_VARIANT (new_type) = NULL;
3962 }
3963
3964 TREE_TYPE (fndecl) = new_type;
3965 DECL_VIRTUAL_P (fndecl) = 0;
3966 DECL_LANG_SPECIFIC (fndecl) = NULL;
3967 otypes.release ();
3968 oparms.release ();
3969 }
3970
3971 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3972 If this is a directly recursive call, CS must be NULL. Otherwise it must
3973 contain the corresponding call graph edge. */
3974
3975 void
3976 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3977 ipa_parm_adjustment_vec adjustments)
3978 {
3979 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3980 vec<tree> vargs;
3981 vec<tree, va_gc> **debug_args = NULL;
3982 gimple new_stmt;
3983 gimple_stmt_iterator gsi, prev_gsi;
3984 tree callee_decl;
3985 int i, len;
3986
3987 len = adjustments.length ();
3988 vargs.create (len);
3989 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3990 current_node->remove_stmt_references (stmt);
3991
3992 gsi = gsi_for_stmt (stmt);
3993 prev_gsi = gsi;
3994 gsi_prev (&prev_gsi);
3995 for (i = 0; i < len; i++)
3996 {
3997 struct ipa_parm_adjustment *adj;
3998
3999 adj = &adjustments[i];
4000
4001 if (adj->op == IPA_PARM_OP_COPY)
4002 {
4003 tree arg = gimple_call_arg (stmt, adj->base_index);
4004
4005 vargs.quick_push (arg);
4006 }
4007 else if (adj->op != IPA_PARM_OP_REMOVE)
4008 {
4009 tree expr, base, off;
4010 location_t loc;
4011 unsigned int deref_align = 0;
4012 bool deref_base = false;
4013
4014 /* We create a new parameter out of the value of the old one; we can
4015 do the following kinds of transformations:
4016
4017 - A scalar passed by reference is converted to a scalar passed by
4018 value. (adj->by_ref is false and the type of the original
4019 actual argument is a pointer to a scalar).
4020
4021 - A part of an aggregate is passed instead of the whole aggregate.
4022 The part can be passed either by value or by reference, this is
4023 determined by value of adj->by_ref. Moreover, the code below
4024 handles both situations when the original aggregate is passed by
4025 value (its type is not a pointer) and when it is passed by
4026 reference (it is a pointer to an aggregate).
4027
4028 When the new argument is passed by reference (adj->by_ref is true)
4029 it must be a part of an aggregate and therefore we form it by
4030 simply taking the address of a reference inside the original
4031 aggregate. */
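
/* For instance (a sketch with hypothetical SSA names): if the original
   first argument is a pointer Q and the new argument is the int at byte
   offset 4 inside *Q passed by value, a call foo (q_2) is rewritten to

     tmp_5 = MEM[(int *) q_2 + 4B];
     foo (tmp_5);

   modulo the alignment and temporary-creation handling below.  */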
4032
4033 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4034 base = gimple_call_arg (stmt, adj->base_index);
4035 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4036 : EXPR_LOCATION (base);
4037
4038 if (TREE_CODE (base) != ADDR_EXPR
4039 && POINTER_TYPE_P (TREE_TYPE (base)))
4040 off = build_int_cst (adj->alias_ptr_type,
4041 adj->offset / BITS_PER_UNIT);
4042 else
4043 {
4044 HOST_WIDE_INT base_offset;
4045 tree prev_base;
4046 bool addrof;
4047
4048 if (TREE_CODE (base) == ADDR_EXPR)
4049 {
4050 base = TREE_OPERAND (base, 0);
4051 addrof = true;
4052 }
4053 else
4054 addrof = false;
4055 prev_base = base;
4056 base = get_addr_base_and_unit_offset (base, &base_offset);
4057 /* Aggregate arguments can have non-invariant addresses. */
4058 if (!base)
4059 {
4060 base = build_fold_addr_expr (prev_base);
4061 off = build_int_cst (adj->alias_ptr_type,
4062 adj->offset / BITS_PER_UNIT);
4063 }
4064 else if (TREE_CODE (base) == MEM_REF)
4065 {
4066 if (!addrof)
4067 {
4068 deref_base = true;
4069 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4070 }
4071 off = build_int_cst (adj->alias_ptr_type,
4072 base_offset
4073 + adj->offset / BITS_PER_UNIT);
4074 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4075 off);
4076 base = TREE_OPERAND (base, 0);
4077 }
4078 else
4079 {
4080 off = build_int_cst (adj->alias_ptr_type,
4081 base_offset
4082 + adj->offset / BITS_PER_UNIT);
4083 base = build_fold_addr_expr (base);
4084 }
4085 }
4086
4087 if (!adj->by_ref)
4088 {
4089 tree type = adj->type;
4090 unsigned int align;
4091 unsigned HOST_WIDE_INT misalign;
4092
4093 if (deref_base)
4094 {
4095 align = deref_align;
4096 misalign = 0;
4097 }
4098 else
4099 {
4100 get_pointer_alignment_1 (base, &align, &misalign);
4101 if (TYPE_ALIGN (type) > align)
4102 align = TYPE_ALIGN (type);
4103 }
4104 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4105 * BITS_PER_UNIT);
4106 misalign = misalign & (align - 1);
4107 if (misalign != 0)
4108 align = (misalign & -misalign);
4109 if (align < TYPE_ALIGN (type))
4110 type = build_aligned_type (type, align);
4111 base = force_gimple_operand_gsi (&gsi, base,
4112 true, NULL, true, GSI_SAME_STMT);
4113 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4114 /* If expr is not a valid gimple call argument, emit
4115 a load into a temporary. */
4116 if (is_gimple_reg_type (TREE_TYPE (expr)))
4117 {
4118 gimple tem = gimple_build_assign (NULL_TREE, expr);
4119 if (gimple_in_ssa_p (cfun))
4120 {
4121 gimple_set_vuse (tem, gimple_vuse (stmt));
4122 expr = make_ssa_name (TREE_TYPE (expr), tem);
4123 }
4124 else
4125 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4126 gimple_assign_set_lhs (tem, expr);
4127 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4128 }
4129 }
4130 else
4131 {
4132 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4133 expr = build_fold_addr_expr (expr);
4134 expr = force_gimple_operand_gsi (&gsi, expr,
4135 true, NULL, true, GSI_SAME_STMT);
4136 }
4137 vargs.quick_push (expr);
4138 }
4139 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4140 {
4141 unsigned int ix;
4142 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4143 gimple def_temp;
4144
4145 arg = gimple_call_arg (stmt, adj->base_index);
4146 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4147 {
4148 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4149 continue;
4150 arg = fold_convert_loc (gimple_location (stmt),
4151 TREE_TYPE (origin), arg);
4152 }
4153 if (debug_args == NULL)
4154 debug_args = decl_debug_args_insert (callee_decl);
4155 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4156 if (ddecl == origin)
4157 {
4158 ddecl = (**debug_args)[ix + 1];
4159 break;
4160 }
4161 if (ddecl == NULL)
4162 {
4163 ddecl = make_node (DEBUG_EXPR_DECL);
4164 DECL_ARTIFICIAL (ddecl) = 1;
4165 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4166 DECL_MODE (ddecl) = DECL_MODE (origin);
4167
4168 vec_safe_push (*debug_args, origin);
4169 vec_safe_push (*debug_args, ddecl);
4170 }
4171 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4172 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4173 }
4174 }
4175
4176 if (dump_file && (dump_flags & TDF_DETAILS))
4177 {
4178 fprintf (dump_file, "replacing stmt:");
4179 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4180 }
4181
4182 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4183 vargs.release ();
4184 if (gimple_call_lhs (stmt))
4185 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4186
4187 gimple_set_block (new_stmt, gimple_block (stmt));
4188 if (gimple_has_location (stmt))
4189 gimple_set_location (new_stmt, gimple_location (stmt));
4190 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4191 gimple_call_copy_flags (new_stmt, stmt);
4192 if (gimple_in_ssa_p (cfun))
4193 {
4194 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4195 if (gimple_vdef (stmt))
4196 {
4197 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4198 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4199 }
4200 }
4201
4202 if (dump_file && (dump_flags & TDF_DETAILS))
4203 {
4204 fprintf (dump_file, "with stmt:");
4205 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4206 fprintf (dump_file, "\n");
4207 }
4208 gsi_replace (&gsi, new_stmt, true);
4209 if (cs)
4210 cgraph_set_call_stmt (cs, new_stmt);
4211 do
4212 {
4213 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
4214 gsi_prev (&gsi);
4215 }
4216 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4217 }
4218
4219 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4220 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4221 specifies whether the function should care about type incompatibility
4222 between the current and new expressions. If it is false, the function
4223 will leave incompatibility issues to the caller. Return true iff the
4224 expression was modified. */
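
/* For example (a sketch): if parameter P was reduced to a piece passed by
   reference with new_decl ISRA.3 (a hypothetical name), an occurrence of
   *EXPR matching that adjustment is rewritten to *ISRA.3, wrapped in a
   VIEW_CONVERT_EXPR when CONVERT is set and the types are incompatible.  */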
4225
4226 bool
4227 ipa_modify_expr (tree *expr, bool convert,
4228 ipa_parm_adjustment_vec adjustments)
4229 {
4230 struct ipa_parm_adjustment *cand
4231 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4232 if (!cand)
4233 return false;
4234
4235 tree src;
4236 if (cand->by_ref)
4237 src = build_simple_mem_ref (cand->new_decl);
4238 else
4239 src = cand->new_decl;
4240
4241 if (dump_file && (dump_flags & TDF_DETAILS))
4242 {
4243 fprintf (dump_file, "About to replace expr ");
4244 print_generic_expr (dump_file, *expr, 0);
4245 fprintf (dump_file, " with ");
4246 print_generic_expr (dump_file, src, 0);
4247 fprintf (dump_file, "\n");
4248 }
4249
4250 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4251 {
4252 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4253 *expr = vce;
4254 }
4255 else
4256 *expr = src;
4257 return true;
4258 }
4259
4260 /* If T is an SSA_NAME, return NULL if it is not a default def or
4261 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4262 the base variable is always returned, regardless of whether it is a default
4263 def. Return T if it is not an SSA_NAME. */
4264
4265 static tree
4266 get_ssa_base_param (tree t, bool ignore_default_def)
4267 {
4268 if (TREE_CODE (t) == SSA_NAME)
4269 {
4270 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4271 return SSA_NAME_VAR (t);
4272 else
4273 return NULL_TREE;
4274 }
4275 return t;
4276 }
4277
4278 /* Given an expression, return an adjustment entry specifying the
4279 transformation to be done on EXPR. If no suitable adjustment entry
4280 was found, returns NULL.
4281
4282 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4283 default def, otherwise bail on them.
4284
4285 If CONVERT is non-NULL, this function will set *CONVERT if the
4286 expression provided is a component reference. ADJUSTMENTS is the
4287 adjustments vector. */
4288
4289 ipa_parm_adjustment *
4290 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4291 ipa_parm_adjustment_vec adjustments,
4292 bool ignore_default_def)
4293 {
4294 if (TREE_CODE (**expr) == BIT_FIELD_REF
4295 || TREE_CODE (**expr) == IMAGPART_EXPR
4296 || TREE_CODE (**expr) == REALPART_EXPR)
4297 {
4298 *expr = &TREE_OPERAND (**expr, 0);
4299 if (convert)
4300 *convert = true;
4301 }
4302
4303 HOST_WIDE_INT offset, size, max_size;
4304 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4305 if (!base || size == -1 || max_size == -1)
4306 return NULL;
4307
4308 if (TREE_CODE (base) == MEM_REF)
4309 {
4310 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4311 base = TREE_OPERAND (base, 0);
4312 }
4313
4314 base = get_ssa_base_param (base, ignore_default_def);
4315 if (!base || TREE_CODE (base) != PARM_DECL)
4316 return NULL;
4317
4318 struct ipa_parm_adjustment *cand = NULL;
4319 unsigned int len = adjustments.length ();
4320 for (unsigned i = 0; i < len; i++)
4321 {
4322 struct ipa_parm_adjustment *adj = &adjustments[i];
4323
4324 if (adj->base == base
4325 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4326 {
4327 cand = adj;
4328 break;
4329 }
4330 }
4331
4332 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4333 return NULL;
4334 return cand;
4335 }
4336
4337 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4338
4339 static bool
4340 index_in_adjustments_multiple_times_p (int base_index,
4341 ipa_parm_adjustment_vec adjustments)
4342 {
4343 int i, len = adjustments.length ();
4344 bool one = false;
4345
4346 for (i = 0; i < len; i++)
4347 {
4348 struct ipa_parm_adjustment *adj;
4349 adj = &adjustments[i];
4350
4351 if (adj->base_index == base_index)
4352 {
4353 if (one)
4354 return true;
4355 else
4356 one = true;
4357 }
4358 }
4359 return false;
4360 }
4361
4362
4363 /* Return adjustments that should have the same effect on function parameters
4364 and call arguments as if they were first changed according to adjustments in
4365 INNER and then by adjustments in OUTER. */
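
/* A worked sketch: with three original parameters, if INNER copies
   parameters 0 and 2 and removes parameter 1, and OUTER then removes its
   parameter 0 (i.e. original parameter 0) and copies its parameter 1, the
   combined vector copies original parameter 2 and removes original
   parameters 0 and 1.  */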
4366
4367 ipa_parm_adjustment_vec
4368 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4369 ipa_parm_adjustment_vec outer)
4370 {
4371 int i, outlen = outer.length ();
4372 int inlen = inner.length ();
4373 int removals = 0;
4374 ipa_parm_adjustment_vec adjustments, tmp;
4375
4376 tmp.create (inlen);
4377 for (i = 0; i < inlen; i++)
4378 {
4379 struct ipa_parm_adjustment *n;
4380 n = &inner[i];
4381
4382 if (n->op == IPA_PARM_OP_REMOVE)
4383 removals++;
4384 else
4385 {
4386 /* FIXME: Handling of new arguments is not implemented yet. */
4387 gcc_assert (n->op != IPA_PARM_OP_NEW);
4388 tmp.quick_push (*n);
4389 }
4390 }
4391
4392 adjustments.create (outlen + removals);
4393 for (i = 0; i < outlen; i++)
4394 {
4395 struct ipa_parm_adjustment r;
4396 struct ipa_parm_adjustment *out = &outer[i];
4397 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4398
4399 memset (&r, 0, sizeof (r));
4400 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4401 if (out->op == IPA_PARM_OP_REMOVE)
4402 {
4403 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4404 {
4405 r.op = IPA_PARM_OP_REMOVE;
4406 adjustments.quick_push (r);
4407 }
4408 continue;
4409 }
4410 else
4411 {
4412 /* FIXME: Handling of new arguments is not implemented yet. */
4413 gcc_assert (out->op != IPA_PARM_OP_NEW);
4414 }
4415
4416 r.base_index = in->base_index;
4417 r.type = out->type;
4418
4419 /* FIXME: Create nonlocal value too. */
4420
4421 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4422 r.op = IPA_PARM_OP_COPY;
4423 else if (in->op == IPA_PARM_OP_COPY)
4424 r.offset = out->offset;
4425 else if (out->op == IPA_PARM_OP_COPY)
4426 r.offset = in->offset;
4427 else
4428 r.offset = in->offset + out->offset;
4429 adjustments.quick_push (r);
4430 }
4431
4432 for (i = 0; i < inlen; i++)
4433 {
4434 struct ipa_parm_adjustment *n = &inner[i];
4435
4436 if (n->op == IPA_PARM_OP_REMOVE)
4437 adjustments.quick_push (*n);
4438 }
4439
4440 tmp.release ();
4441 return adjustments;
4442 }
4443
4444 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4445 way, assuming they are meant to be applied to FNDECL. */
4446
4447 void
4448 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4449 tree fndecl)
4450 {
4451 int i, len = adjustments.length ();
4452 bool first = true;
4453 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4454
4455 fprintf (file, "IPA param adjustments: ");
4456 for (i = 0; i < len; i++)
4457 {
4458 struct ipa_parm_adjustment *adj;
4459 adj = &adjustments[i];
4460
4461 if (!first)
4462 fprintf (file, " ");
4463 else
4464 first = false;
4465
4466 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4467 print_generic_expr (file, parms[adj->base_index], 0);
4468 if (adj->base)
4469 {
4470 fprintf (file, ", base: ");
4471 print_generic_expr (file, adj->base, 0);
4472 }
4473 if (adj->new_decl)
4474 {
4475 fprintf (file, ", new_decl: ");
4476 print_generic_expr (file, adj->new_decl, 0);
4477 }
4478 if (adj->new_ssa_base)
4479 {
4480 fprintf (file, ", new_ssa_base: ");
4481 print_generic_expr (file, adj->new_ssa_base, 0);
4482 }
4483
4484 if (adj->op == IPA_PARM_OP_COPY)
4485 fprintf (file, ", copy_param");
4486 else if (adj->op == IPA_PARM_OP_REMOVE)
4487 fprintf (file, ", remove_param");
4488 else
4489 fprintf (file, ", offset %li", (long) adj->offset);
4490 if (adj->by_ref)
4491 fprintf (file, ", by_ref");
4492 print_node_brief (file, ", type: ", adj->type, 0);
4493 fprintf (file, "\n");
4494 }
4495 parms.release ();
4496 }
4497
4498 /* Dump the AV linked list. */
4499
4500 void
4501 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4502 {
4503 bool comma = false;
4504 fprintf (f, " Aggregate replacements:");
4505 for (; av; av = av->next)
4506 {
4507 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4508 av->index, av->offset);
4509 print_generic_expr (f, av->value, 0);
4510 comma = true;
4511 }
4512 fprintf (f, "\n");
4513 }
4514
4515 /* Stream out jump function JUMP_FUNC to OB. */
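
/* The layout, as implemented below: the jump function type as a uhwi, a
   type-specific payload, the number of aggregate items as a uhwi, one
   by_ref bit if that number is nonzero, and then each item as an offset
   uhwi followed by its value tree.  ipa_read_jump_function must mirror
   this exactly.  */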
4516
4517 static void
4518 ipa_write_jump_function (struct output_block *ob,
4519 struct ipa_jump_func *jump_func)
4520 {
4521 struct ipa_agg_jf_item *item;
4522 struct bitpack_d bp;
4523 int i, count;
4524
4525 streamer_write_uhwi (ob, jump_func->type);
4526 switch (jump_func->type)
4527 {
4528 case IPA_JF_UNKNOWN:
4529 break;
4530 case IPA_JF_KNOWN_TYPE:
4531 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4532 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4533 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4534 break;
4535 case IPA_JF_CONST:
4536 gcc_assert (
4537 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4538 stream_write_tree (ob, jump_func->value.constant.value, true);
4539 break;
4540 case IPA_JF_PASS_THROUGH:
4541 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4542 if (jump_func->value.pass_through.operation == NOP_EXPR)
4543 {
4544 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4545 bp = bitpack_create (ob->main_stream);
4546 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4547 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4548 streamer_write_bitpack (&bp);
4549 }
4550 else
4551 {
4552 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4553 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4554 }
4555 break;
4556 case IPA_JF_ANCESTOR:
4557 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4558 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4559 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4560 bp = bitpack_create (ob->main_stream);
4561 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4562 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4563 streamer_write_bitpack (&bp);
4564 break;
4565 }
4566
4567 count = vec_safe_length (jump_func->agg.items);
4568 streamer_write_uhwi (ob, count);
4569 if (count)
4570 {
4571 bp = bitpack_create (ob->main_stream);
4572 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4573 streamer_write_bitpack (&bp);
4574 }
4575
4576 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4577 {
4578 streamer_write_uhwi (ob, item->offset);
4579 stream_write_tree (ob, item->value, true);
4580 }
4581 }
4582
4583 /* Read in jump function JUMP_FUNC from IB. */
4584
4585 static void
4586 ipa_read_jump_function (struct lto_input_block *ib,
4587 struct ipa_jump_func *jump_func,
4588 struct cgraph_edge *cs,
4589 struct data_in *data_in)
4590 {
4591 enum jump_func_type jftype;
4592 enum tree_code operation;
4593 int i, count;
4594
4595 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4596 switch (jftype)
4597 {
4598 case IPA_JF_UNKNOWN:
4599 jump_func->type = IPA_JF_UNKNOWN;
4600 break;
4601 case IPA_JF_KNOWN_TYPE:
4602 {
4603 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4604 tree base_type = stream_read_tree (ib, data_in);
4605 tree component_type = stream_read_tree (ib, data_in);
4606
4607 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4608 break;
4609 }
4610 case IPA_JF_CONST:
4611 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4612 break;
4613 case IPA_JF_PASS_THROUGH:
4614 operation = (enum tree_code) streamer_read_uhwi (ib);
4615 if (operation == NOP_EXPR)
4616 {
4617 int formal_id = streamer_read_uhwi (ib);
4618 struct bitpack_d bp = streamer_read_bitpack (ib);
4619 bool agg_preserved = bp_unpack_value (&bp, 1);
4620 bool type_preserved = bp_unpack_value (&bp, 1);
4621 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4622 type_preserved);
4623 }
4624 else
4625 {
4626 tree operand = stream_read_tree (ib, data_in);
4627 int formal_id = streamer_read_uhwi (ib);
4628 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4629 operation);
4630 }
4631 break;
4632 case IPA_JF_ANCESTOR:
4633 {
4634 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4635 tree type = stream_read_tree (ib, data_in);
4636 int formal_id = streamer_read_uhwi (ib);
4637 struct bitpack_d bp = streamer_read_bitpack (ib);
4638 bool agg_preserved = bp_unpack_value (&bp, 1);
4639 bool type_preserved = bp_unpack_value (&bp, 1);
4640
4641 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4642 type_preserved);
4643 break;
4644 }
4645 }
4646
4647 count = streamer_read_uhwi (ib);
4648 vec_alloc (jump_func->agg.items, count);
4649 if (count)
4650 {
4651 struct bitpack_d bp = streamer_read_bitpack (ib);
4652 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4653 }
4654 for (i = 0; i < count; i++)
4655 {
4656 struct ipa_agg_jf_item item;
4657 item.offset = streamer_read_uhwi (ib);
4658 item.value = stream_read_tree (ib, data_in);
4659 jump_func->agg.items->quick_push (item);
4660 }
4661 }
4662
4663 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4664 relevant to indirect inlining to OB. */
4665
4666 static void
4667 ipa_write_indirect_edge_info (struct output_block *ob,
4668 struct cgraph_edge *cs)
4669 {
4670 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4671 struct bitpack_d bp;
4672
4673 streamer_write_hwi (ob, ii->param_index);
4674 streamer_write_hwi (ob, ii->offset);
4675 bp = bitpack_create (ob->main_stream);
4676 bp_pack_value (&bp, ii->polymorphic, 1);
4677 bp_pack_value (&bp, ii->agg_contents, 1);
4678 bp_pack_value (&bp, ii->member_ptr, 1);
4679 bp_pack_value (&bp, ii->by_ref, 1);
4680 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4681 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4682 streamer_write_bitpack (&bp);
4683
4684 if (ii->polymorphic)
4685 {
4686 streamer_write_hwi (ob, ii->otr_token);
4687 stream_write_tree (ob, ii->otr_type, true);
4688 stream_write_tree (ob, ii->outer_type, true);
4689 }
4690 }
4691
4692 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4693 relevant to indirect inlining from IB. */
4694
4695 static void
4696 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4697 struct data_in *data_in ATTRIBUTE_UNUSED,
4698 struct cgraph_edge *cs)
4699 {
4700 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4701 struct bitpack_d bp;
4702
4703 ii->param_index = (int) streamer_read_hwi (ib);
4704 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4705 bp = streamer_read_bitpack (ib);
4706 ii->polymorphic = bp_unpack_value (&bp, 1);
4707 ii->agg_contents = bp_unpack_value (&bp, 1);
4708 ii->member_ptr = bp_unpack_value (&bp, 1);
4709 ii->by_ref = bp_unpack_value (&bp, 1);
4710 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4711 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4712 if (ii->polymorphic)
4713 {
4714 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4715 ii->otr_type = stream_read_tree (ib, data_in);
4716 ii->outer_type = stream_read_tree (ib, data_in);
4717 }
4718 }
4719
4720 /* Stream out NODE info to OB. */
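
/* The layout, as written below: the node reference, the parameter count,
   per-parameter move costs, a bitpack of the per-parameter used flags,
   per-parameter controlled-uses counts, and then, for each direct and
   indirect outgoing edge, its argument count and jump functions (indirect
   edges additionally get their indirect call info).  ipa_read_node_info
   must mirror this.  */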
4721
4722 static void
4723 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4724 {
4725 int node_ref;
4726 lto_symtab_encoder_t encoder;
4727 struct ipa_node_params *info = IPA_NODE_REF (node);
4728 int j;
4729 struct cgraph_edge *e;
4730 struct bitpack_d bp;
4731
4732 encoder = ob->decl_state->symtab_node_encoder;
4733 node_ref = lto_symtab_encoder_encode (encoder, node);
4734 streamer_write_uhwi (ob, node_ref);
4735
4736 streamer_write_uhwi (ob, ipa_get_param_count (info));
4737 for (j = 0; j < ipa_get_param_count (info); j++)
4738 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4739 bp = bitpack_create (ob->main_stream);
4740 gcc_assert (info->analysis_done
4741 || ipa_get_param_count (info) == 0);
4742 gcc_assert (!info->node_enqueued);
4743 gcc_assert (!info->ipcp_orig_node);
4744 for (j = 0; j < ipa_get_param_count (info); j++)
4745 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4746 streamer_write_bitpack (&bp);
4747 for (j = 0; j < ipa_get_param_count (info); j++)
4748 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4749 for (e = node->callees; e; e = e->next_callee)
4750 {
4751 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4752
4753 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4754 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4755 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4756 }
4757 for (e = node->indirect_calls; e; e = e->next_callee)
4758 {
4759 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4760
4761 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4762 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4763 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4764 ipa_write_indirect_edge_info (ob, e);
4765 }
4766 }
4767
4768 /* Stream in NODE info from IB. */
4769
4770 static void
4771 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4772 struct data_in *data_in)
4773 {
4774 struct ipa_node_params *info = IPA_NODE_REF (node);
4775 int k;
4776 struct cgraph_edge *e;
4777 struct bitpack_d bp;
4778
4779 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4780
4781 for (k = 0; k < ipa_get_param_count (info); k++)
4782 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4783
4784 bp = streamer_read_bitpack (ib);
4785 if (ipa_get_param_count (info) != 0)
4786 info->analysis_done = true;
4787 info->node_enqueued = false;
4788 for (k = 0; k < ipa_get_param_count (info); k++)
4789 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4790 for (k = 0; k < ipa_get_param_count (info); k++)
4791 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4792 for (e = node->callees; e; e = e->next_callee)
4793 {
4794 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4795 int count = streamer_read_uhwi (ib);
4796
4797 if (!count)
4798 continue;
4799 vec_safe_grow_cleared (args->jump_functions, count);
4800
4801 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4802 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4803 data_in);
4804 }
4805 for (e = node->indirect_calls; e; e = e->next_callee)
4806 {
4807 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4808 int count = streamer_read_uhwi (ib);
4809
4810 if (count)
4811 {
4812 vec_safe_grow_cleared (args->jump_functions, count);
4813 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4814 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4815 data_in);
4816 }
4817 ipa_read_indirect_edge_info (ib, data_in, e);
4818 }
4819 }
4820
4821 /* Write jump functions for all functions in the current partition. */
4822
4823 void
4824 ipa_prop_write_jump_functions (void)
4825 {
4826 struct cgraph_node *node;
4827 struct output_block *ob;
4828 unsigned int count = 0;
4829 lto_symtab_encoder_iterator lsei;
4830 lto_symtab_encoder_t encoder;
4831
4833 if (!ipa_node_params_vector.exists ())
4834 return;
4835
4836 ob = create_output_block (LTO_section_jump_functions);
4837 encoder = ob->decl_state->symtab_node_encoder;
4838 ob->cgraph_node = NULL;
4839 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4840 lsei_next_function_in_partition (&lsei))
4841 {
4842 node = lsei_cgraph_node (lsei);
4843 if (cgraph_function_with_gimple_body_p (node)
4844 && IPA_NODE_REF (node) != NULL)
4845 count++;
4846 }
4847
4848 streamer_write_uhwi (ob, count);
4849
4850 /* Process all of the functions. */
4851 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4852 lsei_next_function_in_partition (&lsei))
4853 {
4854 node = lsei_cgraph_node (lsei);
4855 if (cgraph_function_with_gimple_body_p (node)
4856 && IPA_NODE_REF (node) != NULL)
4857 ipa_write_node_info (ob, node);
4858 }
4859 streamer_write_char_stream (ob->main_stream, 0);
4860 produce_asm (ob, NULL);
4861 destroy_output_block (ob);
4862 }
4863
4864 /* Read section in file FILE_DATA of length LEN with data DATA. */
4865
4866 static void
4867 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4868 size_t len)
4869 {
4870 const struct lto_function_header *header =
4871 (const struct lto_function_header *) data;
4872 const int cfg_offset = sizeof (struct lto_function_header);
4873 const int main_offset = cfg_offset + header->cfg_size;
4874 const int string_offset = main_offset + header->main_size;
4875 struct data_in *data_in;
4876 struct lto_input_block ib_main;
4877 unsigned int i;
4878 unsigned int count;
4879
4880 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4881 header->main_size);
4882
4883 data_in =
4884 lto_data_in_create (file_data, (const char *) data + string_offset,
4885 header->string_size, vNULL);
4886 count = streamer_read_uhwi (&ib_main);
4887
4888 for (i = 0; i < count; i++)
4889 {
4890 unsigned int index;
4891 struct cgraph_node *node;
4892 lto_symtab_encoder_t encoder;
4893
4894 index = streamer_read_uhwi (&ib_main);
4895 encoder = file_data->symtab_node_encoder;
4896 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4897 gcc_assert (node->definition);
4898 ipa_read_node_info (&ib_main, node, data_in);
4899 }
4900 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4901 len);
4902 lto_data_in_delete (data_in);
4903 }
4904
4905 /* Read ipcp jump functions. */
4906
4907 void
4908 ipa_prop_read_jump_functions (void)
4909 {
4910 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4911 struct lto_file_decl_data *file_data;
4912 unsigned int j = 0;
4913
4914 ipa_check_create_node_params ();
4915 ipa_check_create_edge_args ();
4916 ipa_register_cgraph_hooks ();
4917
4918 while ((file_data = file_data_vec[j++]))
4919 {
4920 size_t len;
4921 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4922
4923 if (data)
4924 ipa_prop_read_section (file_data, data, len);
4925 }
4926 }
4927
4928 /* After merging units, we can get a mismatch in argument counts.
4929 Decl merging might also have rendered parameter lists obsolete.
4930 Compute called_with_variable_arg info as well. */
4931
4932 void
4933 ipa_update_after_lto_read (void)
4934 {
4935 ipa_check_create_node_params ();
4936 ipa_check_create_edge_args ();
4937 }
4938
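/* Stream out the aggregate value replacement chain for NODE to OB.  */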
4939 void
4940 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4941 {
4942 int node_ref;
4943 unsigned int count = 0;
4944 lto_symtab_encoder_t encoder;
4945 struct ipa_agg_replacement_value *aggvals, *av;
4946
4947 aggvals = ipa_get_agg_replacements_for_node (node);
4948 encoder = ob->decl_state->symtab_node_encoder;
4949 node_ref = lto_symtab_encoder_encode (encoder, node);
4950 streamer_write_uhwi (ob, node_ref);
4951
4952 for (av = aggvals; av; av = av->next)
4953 count++;
4954 streamer_write_uhwi (ob, count);
4955
4956 for (av = aggvals; av; av = av->next)
4957 {
4958 struct bitpack_d bp;
4959
4960 streamer_write_uhwi (ob, av->offset);
4961 streamer_write_uhwi (ob, av->index);
4962 stream_write_tree (ob, av->value, true);
4963
4964 bp = bitpack_create (ob->main_stream);
4965 bp_pack_value (&bp, av->by_ref, 1);
4966 streamer_write_bitpack (&bp);
4967 }
4968 }
4969
4970 /* Stream in the aggregate value replacement chain for NODE from IB. */
4971
4972 static void
4973 read_agg_replacement_chain (struct lto_input_block *ib,
4974 struct cgraph_node *node,
4975 struct data_in *data_in)
4976 {
4977 struct ipa_agg_replacement_value *aggvals = NULL;
4978 unsigned int count, i;
4979
4980 count = streamer_read_uhwi (ib);
4981 for (i = 0; i < count; i++)
4982 {
4983 struct ipa_agg_replacement_value *av;
4984 struct bitpack_d bp;
4985
4986 av = ggc_alloc<ipa_agg_replacement_value> ();
4987 av->offset = streamer_read_uhwi (ib);
4988 av->index = streamer_read_uhwi (ib);
4989 av->value = stream_read_tree (ib, data_in);
4990 bp = streamer_read_bitpack (ib);
4991 av->by_ref = bp_unpack_value (&bp, 1);
4992 av->next = aggvals;
4993 aggvals = av;
4994 }
4995 ipa_set_node_agg_value_chain (node, aggvals);
4996 }
4997
4998 /* Write all aggregate replacements for nodes in the current partition. */
4999
5000 void
5001 ipa_prop_write_all_agg_replacement (void)
5002 {
5003 struct cgraph_node *node;
5004 struct output_block *ob;
5005 unsigned int count = 0;
5006 lto_symtab_encoder_iterator lsei;
5007 lto_symtab_encoder_t encoder;
5008
5009 if (!ipa_node_agg_replacements)
5010 return;
5011
5012 ob = create_output_block (LTO_section_ipcp_transform);
5013 encoder = ob->decl_state->symtab_node_encoder;
5014 ob->cgraph_node = NULL;
5015 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5016 lsei_next_function_in_partition (&lsei))
5017 {
5018 node = lsei_cgraph_node (lsei);
5019 if (cgraph_function_with_gimple_body_p (node)
5020 && ipa_get_agg_replacements_for_node (node) != NULL)
5021 count++;
5022 }
5023
5024 streamer_write_uhwi (ob, count);
5025
5026 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5027 lsei_next_function_in_partition (&lsei))
5028 {
5029 node = lsei_cgraph_node (lsei);
5030 if (cgraph_function_with_gimple_body_p (node)
5031 && ipa_get_agg_replacements_for_node (node) != NULL)
5032 write_agg_replacement_chain (ob, node);
5033 }
5034 streamer_write_char_stream (ob->main_stream, 0);
5035 produce_asm (ob, NULL);
5036 destroy_output_block (ob);
5037 }
5038
5039 /* Read replacements section in file FILE_DATA of length LEN with data
5040 DATA. */
5041
5042 static void
5043 read_replacements_section (struct lto_file_decl_data *file_data,
5044 const char *data,
5045 size_t len)
5046 {
5047 const struct lto_function_header *header =
5048 (const struct lto_function_header *) data;
5049 const int cfg_offset = sizeof (struct lto_function_header);
5050 const int main_offset = cfg_offset + header->cfg_size;
5051 const int string_offset = main_offset + header->main_size;
5052 struct data_in *data_in;
5053 struct lto_input_block ib_main;
5054 unsigned int i;
5055 unsigned int count;
5056
5057 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
5058 header->main_size);
5059
5060 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5061 header->string_size, vNULL);
5062 count = streamer_read_uhwi (&ib_main);
5063
5064 for (i = 0; i < count; i++)
5065 {
5066 unsigned int index;
5067 struct cgraph_node *node;
5068 lto_symtab_encoder_t encoder;
5069
5070 index = streamer_read_uhwi (&ib_main);
5071 encoder = file_data->symtab_node_encoder;
5072 node = cgraph (lto_symtab_encoder_deref (encoder, index));
5073 gcc_assert (node->definition);
5074 read_agg_replacement_chain (&ib_main, node, data_in);
5075 }
5076 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5077 len);
5078 lto_data_in_delete (data_in);
5079 }
5080
5081 /* Read IPA-CP aggregate replacements. */
5082
5083 void
5084 ipa_prop_read_all_agg_replacement (void)
5085 {
5086 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5087 struct lto_file_decl_data *file_data;
5088 unsigned int j = 0;
5089
5090 while ((file_data = file_data_vec[j++]))
5091 {
5092 size_t len;
5093 const char *data = lto_get_section_data (file_data,
5094 LTO_section_ipcp_transform,
5095 NULL, &len);
5096 if (data)
5097 read_replacements_section (file_data, data, len);
5098 }
5099 }
5100
5101 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5102 NODE. */
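
/* For example (a sketch): with four original parameters, replacement values
   referencing indices up to 3 and only bit 1 set in combined_args_to_skip,
   the index mapping computed below is 0 -> 0, 1 -> -1, 2 -> 1 and
   3 -> 2.  */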
5103
5104 static void
5105 adjust_agg_replacement_values (struct cgraph_node *node,
5106 struct ipa_agg_replacement_value *aggval)
5107 {
5108 struct ipa_agg_replacement_value *v;
5109 int i, c = 0, d = 0, *adj;
5110
5111 if (!node->clone.combined_args_to_skip)
5112 return;
5113
5114 for (v = aggval; v; v = v->next)
5115 {
5116 gcc_assert (v->index >= 0);
5117 if (c < v->index)
5118 c = v->index;
5119 }
5120 c++;
5121
5122 adj = XALLOCAVEC (int, c);
5123 for (i = 0; i < c; i++)
5124 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5125 {
5126 adj[i] = -1;
5127 d++;
5128 }
5129 else
5130 adj[i] = i - d;
5131
5132 for (v = aggval; v; v = v->next)
5133 v->index = adj[v->index];
5134 }
5135
5136 /* Dominator walker driving the ipcp modification phase. */
5137
5138 class ipcp_modif_dom_walker : public dom_walker
5139 {
5140 public:
5141 ipcp_modif_dom_walker (struct func_body_info *fbi,
5142 vec<ipa_param_descriptor> descs,
5143 struct ipa_agg_replacement_value *av,
5144 bool *sc, bool *cc)
5145 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5146 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5147
5148 virtual void before_dom_children (basic_block);
5149
5150 private:
5151 struct func_body_info *m_fbi;
5152 vec<ipa_param_descriptor> m_descriptors;
5153 struct ipa_agg_replacement_value *m_aggval;
5154 bool *m_something_changed, *m_cfg_changed;
5155 };
5156
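/* Scan BB for loads from aggregate locations for which M_AGGVAL records a
   known constant and replace them.  A sketch with hypothetical SSA names:
   a load

     tmp_3 = p_1(D)->f;

   whose index, offset, size and by_ref flag match a recorded constant 7
   becomes

     tmp_3 = 7;  */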
5157 void
5158 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5159 {
5160 gimple_stmt_iterator gsi;
5161 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5162 {
5163 struct ipa_agg_replacement_value *v;
5164 gimple stmt = gsi_stmt (gsi);
5165 tree rhs, val, t;
5166 HOST_WIDE_INT offset, size;
5167 int index;
5168 bool by_ref, vce;
5169
5170 if (!gimple_assign_load_p (stmt))
5171 continue;
5172 rhs = gimple_assign_rhs1 (stmt);
5173 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5174 continue;
5175
5176 vce = false;
5177 t = rhs;
5178 while (handled_component_p (t))
5179 {
5180 /* V_C_E can do things like convert an array of integers to one
5181 bigger integer and similar things we do not handle below. */
5182 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5183 {
5184 vce = true;
5185 break;
5186 }
5187 t = TREE_OPERAND (t, 0);
5188 }
5189 if (vce)
5190 continue;
5191
5192 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5193 &offset, &size, &by_ref))
5194 continue;
5195 for (v = m_aggval; v; v = v->next)
5196 if (v->index == index
5197 && v->offset == offset)
5198 break;
5199 if (!v
5200 || v->by_ref != by_ref
5201 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5202 continue;
5203
5204 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5205 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5206 {
5207 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5208 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5209 else if (TYPE_SIZE (TREE_TYPE (rhs))
5210 == TYPE_SIZE (TREE_TYPE (v->value)))
5211 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5212 else
5213 {
5214 if (dump_file)
5215 {
5216 fprintf (dump_file, " const ");
5217 print_generic_expr (dump_file, v->value, 0);
5218 fprintf (dump_file, " can't be converted to type of ");
5219 print_generic_expr (dump_file, rhs, 0);
5220 fprintf (dump_file, "\n");
5221 }
5222 continue;
5223 }
5224 }
5225 else
5226 val = v->value;
5227
5228 if (dump_file && (dump_flags & TDF_DETAILS))
5229 {
5230 fprintf (dump_file, "Modifying stmt:\n ");
5231 print_gimple_stmt (dump_file, stmt, 0, 0);
5232 }
5233 gimple_assign_set_rhs_from_tree (&gsi, val);
5234 update_stmt (stmt);
5235
5236 if (dump_file && (dump_flags & TDF_DETAILS))
5237 {
5238 fprintf (dump_file, "into:\n ");
5239 print_gimple_stmt (dump_file, stmt, 0, 0);
5240 fprintf (dump_file, "\n");
5241 }
5242
5243 *m_something_changed = true;
5244 if (maybe_clean_eh_stmt (stmt)
5245 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5246 *m_cfg_changed = true;
5247 }
5249 }
5250
5251 /* IPCP transformation phase doing propagation of aggregate values. */
5252
5253 unsigned int
5254 ipcp_transform_function (struct cgraph_node *node)
5255 {
5256 vec<ipa_param_descriptor> descriptors = vNULL;
5257 struct func_body_info fbi;
5258 struct ipa_agg_replacement_value *aggval;
5259 int param_count;
5260 bool cfg_changed = false, something_changed = false;
5261
5262 gcc_checking_assert (cfun);
5263 gcc_checking_assert (current_function_decl);
5264
5265 if (dump_file)
5266 fprintf (dump_file, "Modification phase of node %s/%i\n",
5267 node->name (), node->order);
5268
5269 aggval = ipa_get_agg_replacements_for_node (node);
5270 if (!aggval)
5271 return 0;
5272 param_count = count_formal_params (node->decl);
5273 if (param_count == 0)
5274 return 0;
5275 adjust_agg_replacement_values (node, aggval);
5276 if (dump_file)
5277 ipa_dump_agg_replacement_values (dump_file, aggval);
5278
5279 fbi.node = node;
5280 fbi.info = NULL;
5281 fbi.bb_infos = vNULL;
5282 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5283 fbi.param_count = param_count;
5284 fbi.aa_walked = 0;
5285
5286 descriptors.safe_grow_cleared (param_count);
5287 ipa_populate_param_decls (node, descriptors);
5288 calculate_dominance_info (CDI_DOMINATORS);
5289 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5290 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5291
5292 int i;
5293 struct ipa_bb_info *bi;
5294 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5295 free_ipa_bb_info (bi);
5296 fbi.bb_infos.release ();
5297 free_dominance_info (CDI_DOMINATORS);
5298 (*ipa_node_agg_replacements)[node->uid] = NULL;
5299 descriptors.release ();
5300
5301 if (!something_changed)
5302 return 0;
5303 else if (cfg_changed)
5304 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5305 else
5306 return TODO_update_ssa_only_virtuals;
5307 }