ipa-prop.c (ipa_set_jf_known_type): Always record the main variant of the type.
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60 #include "stringpool.h"
61 #include "tree-ssanames.h"
62 #include "dbgcnt.h"
63 #include "domwalk.h"
64 #include "builtins.h"
65
66 /* Intermediate information that we get from alias analysis about a particular
67 parameter in a particular basic_block. When a parameter or the memory it
68 references is marked modified, we use that information in all dominated
69 blocks without consulting the alias analysis oracle. */
70
71 struct param_aa_status
72 {
73 /* Set when this structure contains meaningful information. If not, the
74 structure describing a dominating BB should be used instead. */
75 bool valid;
76
77 /* Whether we have seen something which might have modified the data in
78 question. PARM is for the parameter itself, REF is for data it points to
79 but using the alias type of individual accesses and PT is the same thing
80 but for computing aggregate pass-through functions using a very inclusive
81 ao_ref. */
82 bool parm_modified, ref_modified, pt_modified;
83 };
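
/* A hypothetical sketch of how this is used: in

     void f (int *p)
     {
       g ();          <- BB2: a vdef walk here may set ref_modified
       if (cond)
         use (*p);    <- BB3 is dominated by BB2 and reuses its status
     }

   a query made in BB3 finds the status cached for BB2 via the dominator tree
   instead of walking the virtual operands again. */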
84
85 /* Information related to a given BB that is used only when looking at the function
86 body. */
87
88 struct ipa_bb_info
89 {
90 /* Call graph edges going out of this BB. */
91 vec<cgraph_edge_p> cg_edges;
92 /* Alias analysis statuses of each formal parameter at this bb. */
93 vec<param_aa_status> param_aa_statuses;
94 };
95
96 /* Structure with global information that is only used when looking at the function
97 body. */
98
99 struct func_body_info
100 {
101 /* The node that is being analyzed. */
102 cgraph_node *node;
103
104 /* Its info. */
105 struct ipa_node_params *info;
106
107 /* Information about individual BBs. */
108 vec<ipa_bb_info> bb_infos;
109
110 /* Number of parameters. */
111 int param_count;
112
113 /* Number of statements already walked when analyzing this function. */
114 unsigned int aa_walked;
115 };
116
117 /* Vector where the parameter infos are actually stored. */
118 vec<ipa_node_params> ipa_node_params_vector;
119 /* Vector of known aggregate values in cloned nodes. */
120 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
121 /* Vector where the edge argument infos are actually stored. */
122 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
123
124 /* Holders of ipa cgraph hooks: */
125 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
126 static struct cgraph_node_hook_list *node_removal_hook_holder;
127 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
128 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
129 static struct cgraph_node_hook_list *function_insertion_hook_holder;
130
131 /* Description of a reference to an IPA constant. */
132 struct ipa_cst_ref_desc
133 {
134 /* Edge that corresponds to the statement which took the reference. */
135 struct cgraph_edge *cs;
136 /* Linked list of duplicates created when call graph edges are cloned. */
137 struct ipa_cst_ref_desc *next_duplicate;
138 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
139 is out of control. */
140 int refcount;
141 };
142
143 /* Allocation pool for reference descriptions. */
144
145 static alloc_pool ipa_refdesc_pool;
146
147 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
148 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
149
150 static bool
151 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
152 {
153 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
154 struct cl_optimization *os;
155
156 if (!fs_opts)
157 return false;
158 os = TREE_OPTIMIZATION (fs_opts);
159 return !os->x_optimize || !os->x_flag_ipa_cp;
160 }
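
/* For example, a function declared with __attribute__ ((optimize ("O0")))
   carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node with x_optimize == 0,
   so the predicate above excludes it from IPA-CP analysis. */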
161
162 /* Return index of the formal whose tree is PTREE in the function whose
163 parameters are described by DESCRIPTORS. */
164
165 static int
166 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
167 {
168 int i, count;
169
170 count = descriptors.length ();
171 for (i = 0; i < count; i++)
172 if (descriptors[i].decl == ptree)
173 return i;
174
175 return -1;
176 }
177
178 /* Return index of the formal whose tree is PTREE in the function which
179 corresponds to INFO. */
180
181 int
182 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
183 {
184 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
185 }
186
187 /* Populate the decl fields in parameter DESCRIPTORS that correspond to
188 NODE. */
189
190 static void
191 ipa_populate_param_decls (struct cgraph_node *node,
192 vec<ipa_param_descriptor> &descriptors)
193 {
194 tree fndecl;
195 tree fnargs;
196 tree parm;
197 int param_num;
198
199 fndecl = node->decl;
200 gcc_assert (gimple_has_body_p (fndecl));
201 fnargs = DECL_ARGUMENTS (fndecl);
202 param_num = 0;
203 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
204 {
205 descriptors[param_num].decl = parm;
206 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
207 param_num++;
208 }
209 }
210
211 /* Return how many formal parameters FNDECL has. */
212
213 static inline int
214 count_formal_params (tree fndecl)
215 {
216 tree parm;
217 int count = 0;
218 gcc_assert (gimple_has_body_p (fndecl));
219
220 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
221 count++;
222
223 return count;
224 }
225
226 /* Dump the Ith formal parameter of the function corresponding to INFO to
227 FILE, together with its declaration if it is available. */
229
230 void
231 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
232 {
233 fprintf (file, "param #%i", i);
234 if (info->descriptors[i].decl)
235 {
236 fprintf (file, " ");
237 print_generic_expr (file, info->descriptors[i].decl, 0);
238 }
239 }
240
241 /* Initialize the ipa_node_params structure associated with NODE
242 to hold PARAM_COUNT parameters. */
243
244 void
245 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
246 {
247 struct ipa_node_params *info = IPA_NODE_REF (node);
248
249 if (!info->descriptors.exists () && param_count)
250 info->descriptors.safe_grow_cleared (param_count);
251 }
252
253 /* Initialize the ipa_node_params structure associated with NODE by counting
254 the function parameters, creating the descriptors and populating their
255 param_decls. */
256
257 void
258 ipa_initialize_node_params (struct cgraph_node *node)
259 {
260 struct ipa_node_params *info = IPA_NODE_REF (node);
261
262 if (!info->descriptors.exists ())
263 {
264 ipa_alloc_node_params (node, count_formal_params (node->decl));
265 ipa_populate_param_decls (node, info->descriptors);
266 }
267 }
268
269 /* Print the jump functions associated with call graph edge CS to file F. */
270
271 static void
272 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
273 {
274 int i, count;
275
276 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
277 for (i = 0; i < count; i++)
278 {
279 struct ipa_jump_func *jump_func;
280 enum jump_func_type type;
281
282 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
283 type = jump_func->type;
284
285 fprintf (f, " param %d: ", i);
286 if (type == IPA_JF_UNKNOWN)
287 fprintf (f, "UNKNOWN\n");
288 else if (type == IPA_JF_KNOWN_TYPE)
289 {
290 fprintf (f, "KNOWN TYPE: base ");
291 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
292 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
293 jump_func->value.known_type.offset);
294 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
295 fprintf (f, "\n");
296 }
297 else if (type == IPA_JF_CONST)
298 {
299 tree val = jump_func->value.constant.value;
300 fprintf (f, "CONST: ");
301 print_generic_expr (f, val, 0);
302 if (TREE_CODE (val) == ADDR_EXPR
303 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
304 {
305 fprintf (f, " -> ");
306 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
307 0);
308 }
309 fprintf (f, "\n");
310 }
311 else if (type == IPA_JF_PASS_THROUGH)
312 {
313 fprintf (f, "PASS THROUGH: ");
314 fprintf (f, "%d, op %s",
315 jump_func->value.pass_through.formal_id,
316 get_tree_code_name (jump_func->value.pass_through.operation));
317 if (jump_func->value.pass_through.operation != NOP_EXPR)
318 {
319 fprintf (f, " ");
320 print_generic_expr (f,
321 jump_func->value.pass_through.operand, 0);
322 }
323 if (jump_func->value.pass_through.agg_preserved)
324 fprintf (f, ", agg_preserved");
325 if (jump_func->value.pass_through.type_preserved)
326 fprintf (f, ", type_preserved");
327 fprintf (f, "\n");
328 }
329 else if (type == IPA_JF_ANCESTOR)
330 {
331 fprintf (f, "ANCESTOR: ");
332 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
333 jump_func->value.ancestor.formal_id,
334 jump_func->value.ancestor.offset);
335 print_generic_expr (f, jump_func->value.ancestor.type, 0);
336 if (jump_func->value.ancestor.agg_preserved)
337 fprintf (f, ", agg_preserved");
338 if (jump_func->value.ancestor.type_preserved)
339 fprintf (f, ", type_preserved");
340 fprintf (f, "\n");
341 }
342
343 if (jump_func->agg.items)
344 {
345 struct ipa_agg_jf_item *item;
346 int j;
347
348 fprintf (f, " Aggregate passed by %s:\n",
349 jump_func->agg.by_ref ? "reference" : "value");
350 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
351 {
352 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
353 item->offset);
354 if (TYPE_P (item->value))
355 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
356 tree_to_uhwi (TYPE_SIZE (item->value)));
357 else
358 {
359 fprintf (f, "cst: ");
360 print_generic_expr (f, item->value, 0);
361 }
362 fprintf (f, "\n");
363 }
364 }
365 }
366 }
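
/* An illustrative (not verbatim) fragment of the dump produced above:

     param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
     param 1: CONST: 4
     param 2: KNOWN TYPE: base struct A, offset 0, component struct A */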
367
368
369 /* Print the jump functions of all arguments on all call graph edges going from
370 NODE to file F. */
371
372 void
373 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
374 {
375 struct cgraph_edge *cs;
376
377 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
378 node->order);
379 for (cs = node->callees; cs; cs = cs->next_callee)
380 {
381 if (!ipa_edge_args_info_available_for_edge_p (cs))
382 continue;
383
384 fprintf (f, " callsite %s/%i -> %s/%i : \n",
385 xstrdup (node->name ()), node->order,
386 xstrdup (cs->callee->name ()),
387 cs->callee->order);
388 ipa_print_node_jump_functions_for_edge (f, cs);
389 }
390
391 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
392 {
393 struct cgraph_indirect_call_info *ii;
394 if (!ipa_edge_args_info_available_for_edge_p (cs))
395 continue;
396
397 ii = cs->indirect_info;
398 if (ii->agg_contents)
399 fprintf (f, " indirect %s callsite, calling param %i, "
400 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
401 ii->member_ptr ? "member ptr" : "aggregate",
402 ii->param_index, ii->offset,
403 ii->by_ref ? "by reference" : "by value");
404 else
405 fprintf (f, " indirect %s callsite, calling param %i, "
406 "offset " HOST_WIDE_INT_PRINT_DEC,
407 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
408 ii->offset);
409
410 if (cs->call_stmt)
411 {
412 fprintf (f, ", for stmt ");
413 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
414 }
415 else
416 fprintf (f, "\n");
417 ipa_print_node_jump_functions_for_edge (f, cs);
418 }
419 }
420
421 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
422
423 void
424 ipa_print_all_jump_functions (FILE *f)
425 {
426 struct cgraph_node *node;
427
428 fprintf (f, "\nJump functions:\n");
429 FOR_EACH_FUNCTION (node)
430 {
431 ipa_print_node_jump_functions (f, node);
432 }
433 }
434
435 /* Set JFUNC to be a known type jump function. */
436
437 static void
438 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
439 tree base_type, tree component_type)
440 {
441 /* Recording and propagating main variants increases the chance that types
442 will match. */
443 base_type = TYPE_MAIN_VARIANT (base_type);
444 component_type = TYPE_MAIN_VARIANT (component_type);
445
446 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
447 && TYPE_BINFO (component_type));
448 if (!flag_devirtualize)
449 return;
450 gcc_assert (BINFO_VTABLE (TYPE_BINFO (component_type)));
451 jfunc->type = IPA_JF_KNOWN_TYPE;
452 jfunc->value.known_type.offset = offset;
453 jfunc->value.known_type.base_type = base_type;
454 jfunc->value.known_type.component_type = component_type;
455 gcc_assert (component_type);
456 }
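
/* A known type jump function typically arises from code like this
   (a hypothetical sketch):

     struct A { virtual void foo (); };
     void consume (struct A *);
     void produce (void) { struct A a; consume (&a); }

   Here &a is known to point to an object whose dynamic type is A, so
   base_type == component_type == A and offset == 0. */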
457
458 /* Set JFUNC to be a copy of another jump function SRC (to be used by the
459 jump function combination code). The two functions will share their rdesc. */
460
461 static void
462 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
463 struct ipa_jump_func *src)
464
465 {
466 gcc_checking_assert (src->type == IPA_JF_CONST);
467 dst->type = IPA_JF_CONST;
468 dst->value.constant = src->value.constant;
469 }
470
471 /* Set JFUNC to be a constant jump function. */
472
473 static void
474 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
475 struct cgraph_edge *cs)
476 {
477 constant = unshare_expr (constant);
478 if (constant && EXPR_P (constant))
479 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
480 jfunc->type = IPA_JF_CONST;
481 jfunc->value.constant.value = unshare_expr_without_location (constant);
482
483 if (TREE_CODE (constant) == ADDR_EXPR
484 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
485 {
486 struct ipa_cst_ref_desc *rdesc;
487 if (!ipa_refdesc_pool)
488 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
489 sizeof (struct ipa_cst_ref_desc), 32);
490
491 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
492 rdesc->cs = cs;
493 rdesc->next_duplicate = NULL;
494 rdesc->refcount = 1;
495 jfunc->value.constant.rdesc = rdesc;
496 }
497 else
498 jfunc->value.constant.rdesc = NULL;
499 }
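
/* For instance (a hypothetical sketch), for a call such as

     qsort (a, n, sizeof (*a), my_cmp);

   the jump function of the last argument is IPA_JF_CONST holding &my_cmp,
   and the attached ipa_cst_ref_desc tracks the reference to my_cmp so that
   it can be removed if all uses of the constant disappear during IPA
   optimizations. */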
500
501 /* Set JFUNC to be a simple pass-through jump function. */
502 static void
503 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
504 bool agg_preserved, bool type_preserved)
505 {
506 jfunc->type = IPA_JF_PASS_THROUGH;
507 jfunc->value.pass_through.operand = NULL_TREE;
508 jfunc->value.pass_through.formal_id = formal_id;
509 jfunc->value.pass_through.operation = NOP_EXPR;
510 jfunc->value.pass_through.agg_preserved = agg_preserved;
511 jfunc->value.pass_through.type_preserved = type_preserved;
512 }
513
514 /* Set JFUNC to be an arithmetic pass through jump function. */
515
516 static void
517 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
518 tree operand, enum tree_code operation)
519 {
520 jfunc->type = IPA_JF_PASS_THROUGH;
521 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
522 jfunc->value.pass_through.formal_id = formal_id;
523 jfunc->value.pass_through.operation = operation;
524 jfunc->value.pass_through.agg_preserved = false;
525 jfunc->value.pass_through.type_preserved = false;
526 }
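
/* E.g. passing a + 4, where a is the first formal parameter, is described
   by formal_id == 0, operation == PLUS_EXPR and operand == 4. */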
527
528 /* Set JFUNC to be an ancestor jump function. */
529
530 static void
531 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
532 tree type, int formal_id, bool agg_preserved,
533 bool type_preserved)
534 {
535 if (!flag_devirtualize)
536 type_preserved = false;
537 if (type)
538 type = TYPE_MAIN_VARIANT (type);
539 gcc_assert (!type_preserved
540 || (TREE_CODE (type) == RECORD_TYPE
541 && TYPE_BINFO (type)
542 && BINFO_VTABLE (TYPE_BINFO (type))));
543 jfunc->type = IPA_JF_ANCESTOR;
544 jfunc->value.ancestor.formal_id = formal_id;
545 jfunc->value.ancestor.offset = offset;
546 jfunc->value.ancestor.type = type_preserved ? type : NULL;
547 jfunc->value.ancestor.agg_preserved = agg_preserved;
548 jfunc->value.ancestor.type_preserved = type_preserved;
549 }
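
/* An ancestor jump function typically describes passing the address of a
   base-class sub-object of a formal parameter, e.g. (a GIMPLE sketch):

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   with OFFSET being the position of the sub-object within the outer type. */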
550
551 /* Extract the actual BINFO being described by JFUNC which must be a known type
552 jump function. */
553
554 tree
555 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
556 {
557 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
558 if (!base_binfo)
559 return NULL_TREE;
560 return get_binfo_at_offset (base_binfo,
561 jfunc->value.known_type.offset,
562 jfunc->value.known_type.component_type);
563 }
564
565 /* Get IPA BB information about the given BB. FBI is the context of analysis
566 of this function body. */
567
568 static struct ipa_bb_info *
569 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
570 {
571 gcc_checking_assert (fbi);
572 return &fbi->bb_infos[bb->index];
573 }
574
575 /* Structure to be passed in between detect_type_change and
576 check_stmt_for_type_change. */
577
578 struct type_change_info
579 {
580 /* Offset into the object where there is the virtual method pointer we are
581 looking for. */
582 HOST_WIDE_INT offset;
583 /* The declaration or SSA_NAME pointer of the base that we are checking for
584 type change. */
585 tree object;
586 /* If we actually can tell the type that the object has changed to, it is
587 stored in this field. Otherwise it remains NULL_TREE. */
588 tree known_current_type;
589 /* Set to true if dynamic type change has been detected. */
590 bool type_maybe_changed;
591 /* Set to true if multiple types have been encountered. known_current_type
592 must be disregarded in that case. */
593 bool multiple_types_encountered;
594 };
595
596 /* Return true if STMT can modify a virtual method table pointer.
597
598 This function makes special assumptions about both constructors and
599 destructors which are all the functions that are allowed to alter the VMT
600 pointers. It assumes that destructors begin with assignment into all VMT
601 pointers and that constructors essentially look in the following way:
602
603 1) The very first thing they do is that they call constructors of ancestor
604 sub-objects that have them.
605
606 2) Then VMT pointers of this and all its ancestors are set to new values
607 corresponding to the type associated with the constructor.
608
609 3) Only afterwards, other stuff such as constructors of member sub-objects
610 and the code written by the user is run. Only this may include calling
611 virtual functions, directly or indirectly.
612
613 There is no way to call a constructor of an ancestor sub-object in any
614 other way.
615
616 This means that we do not have to care whether constructors get the correct
617 type information because they will always change it (in fact, if we define
618 the type to be given by the VMT pointer, it is undefined).
619
620 The most important fact to derive from the above is that if, for some
621 statement in the section 3, we try to detect whether the dynamic type has
622 changed, we can safely ignore all calls as we examine the function body
623 backwards until we reach statements in section 2 because these calls cannot
624 be ancestor constructors or destructors (if the input is not bogus) and so
625 do not change the dynamic type (this holds true only for automatically
626 allocated objects but at the moment we devirtualize only these). We then
627 must detect that statements in section 2 change the dynamic type and can try
628 to derive the new type. That is enough and we can stop, we will never see
629 the calls into constructors of sub-objects in this code. Therefore we can
630 safely ignore all call statements that we traverse.
631 */
632
633 static bool
634 stmt_may_be_vtbl_ptr_store (gimple stmt)
635 {
636 if (is_gimple_call (stmt))
637 return false;
638 /* TODO: Skip clobbers, doing so triggers problem in PR60306. */
639 else if (is_gimple_assign (stmt))
640 {
641 tree lhs = gimple_assign_lhs (stmt);
642
643 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
644 {
645 if (flag_strict_aliasing
646 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
647 return false;
648
649 if (TREE_CODE (lhs) == COMPONENT_REF
650 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
651 return false;
652 /* In the future we might want to use get_base_ref_and_offset to find
653 if there is a field corresponding to the offset and if so, proceed
654 almost like if it was a component ref. */
655 }
656 }
657 return true;
658 }
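
/* A VMT pointer store that this function must let through looks, in GIMPLE,
   roughly like (a sketch):

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment to a component whose FIELD_DECL has DECL_VIRTUAL_P
   set. */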
659
660 /* If STMT can be proved to be an assignment to the virtual method table
661 pointer of the object described by TCI and the type associated with the new
662 table identified, return the type. Otherwise return NULL_TREE. */
663
664 static tree
665 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
666 {
667 HOST_WIDE_INT offset, size, max_size;
668 tree lhs, rhs, base, binfo;
669
670 if (!gimple_assign_single_p (stmt))
671 return NULL_TREE;
672
673 lhs = gimple_assign_lhs (stmt);
674 rhs = gimple_assign_rhs1 (stmt);
675 if (TREE_CODE (lhs) != COMPONENT_REF
676 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
677 return NULL_TREE;
678
679 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
680 if (offset != tci->offset
681 || size != POINTER_SIZE
682 || max_size != POINTER_SIZE)
683 return NULL_TREE;
684 if (TREE_CODE (base) == MEM_REF)
685 {
686 if (TREE_CODE (tci->object) != MEM_REF
687 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
688 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
689 TREE_OPERAND (base, 1)))
690 return NULL_TREE;
691 }
692 else if (tci->object != base)
693 return NULL_TREE;
694
695 binfo = vtable_pointer_value_to_binfo (rhs);
696
697 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
698 base of outer type. In this case we would need to either
699 work on binfos or translate it back to outer type and offset.
700 KNOWN_TYPE jump functions are not ready for that, yet. */
701 if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
702 return NULL_TREE;
703
704 return BINFO_TYPE (binfo);
705 }
706
707 /* Callback of walk_aliased_vdefs and a helper function for
708 detect_type_change to check whether a particular statement may modify
709 the virtual table pointer, and if possible also determine the new type of
710 the (sub-)object. It stores its result into DATA, which points to a
711 type_change_info structure. */
712
713 static bool
714 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
715 {
716 gimple stmt = SSA_NAME_DEF_STMT (vdef);
717 struct type_change_info *tci = (struct type_change_info *) data;
718
719 if (stmt_may_be_vtbl_ptr_store (stmt))
720 {
721 tree type;
722
723 type = extr_type_from_vtbl_ptr_store (stmt, tci);
724 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
725 if (tci->type_maybe_changed
726 && type != tci->known_current_type)
727 tci->multiple_types_encountered = true;
728 tci->known_current_type = type;
729 tci->type_maybe_changed = true;
730 return true;
731 }
732 else
733 return false;
734 }
735
736
737
738 /* Detect whether the dynamic type of ARG (of compile-time type COMP_TYPE)
739 has changed (before callsite CALL) by looking for assignments to its
740 virtual table pointer. If it has, return true and fill in JFUNC with type
741 information or set it to unknown. ARG is the object itself (not a pointer
742 to it, unless dereferenced). BASE is the base of the memory access as
743 returned by get_ref_base_and_extent, as is the offset. */
744
745 static bool
746 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
747 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
748 {
749 struct type_change_info tci;
750 ao_ref ao;
751
752 gcc_checking_assert (DECL_P (arg)
753 || TREE_CODE (arg) == MEM_REF
754 || handled_component_p (arg));
755 /* Const calls cannot call virtual methods through VMT and so type changes do
756 not matter. */
757 if (!flag_devirtualize || !gimple_vuse (call)
758 /* Be sure expected_type is polymorphic. */
759 || !comp_type
760 || TREE_CODE (comp_type) != RECORD_TYPE
761 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
762 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
763 return true;
764
765 comp_type = TYPE_MAIN_VARIANT (comp_type);
766
767 /* C++ methods are not allowed to change the dynamic type of *THIS unless
768 they are constructors or destructors. */
769 if (TREE_CODE (base) == MEM_REF
770 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
771 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
772 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
773 && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
774 && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
775 && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
776 && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
777 == DECL_ARGUMENTS (current_function_decl)))
778 return false;
779
780 ao_ref_init (&ao, arg);
781 ao.base = base;
782 ao.offset = offset;
783 ao.size = POINTER_SIZE;
784 ao.max_size = ao.size;
785
786 tci.offset = offset;
787 tci.object = get_base_address (arg);
788 tci.known_current_type = NULL_TREE;
789 tci.type_maybe_changed = false;
790 tci.multiple_types_encountered = false;
791
792 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
793 &tci, NULL);
794 if (!tci.type_maybe_changed)
795 return false;
796
797 if (!tci.known_current_type
798 || tci.multiple_types_encountered
799 || offset != 0)
800 jfunc->type = IPA_JF_UNKNOWN;
801 else
802 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
803
804 return true;
805 }
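
/* The kind of dynamic type change detected here looks, at the source level,
   roughly like this (a hypothetical sketch):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     A a;
     a.~A ();
     new (&a) B;    <- the B constructor stores a new VMT pointer
     use (&a);      <- a known type of A recorded earlier is now invalid
*/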
806
807 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
808 SSA name (its dereference will become the base and the offset is assumed to
809 be zero). */
810
811 static bool
812 detect_type_change_ssa (tree arg, tree comp_type,
813 gimple call, struct ipa_jump_func *jfunc)
814 {
815 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
816 if (!flag_devirtualize
817 || !POINTER_TYPE_P (TREE_TYPE (arg)))
818 return false;
819
820 arg = build2 (MEM_REF, ptr_type_node, arg,
821 build_int_cst (ptr_type_node, 0));
822
823 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
824 }
825
826 /* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
827 the boolean variable pointed to by DATA to true. */
828
829 static bool
830 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
831 void *data)
832 {
833 bool *b = (bool *) data;
834 *b = true;
835 return true;
836 }
837
838 /* Return true if we have already walked so many statements in AA that we
839 should really just start giving up. */
840
841 static bool
842 aa_overwalked (struct func_body_info *fbi)
843 {
844 gcc_checking_assert (fbi);
845 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
846 }
847
848 /* Find the nearest valid aa status for parameter specified by INDEX that
849 dominates BB. */
850
851 static struct param_aa_status *
852 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
853 int index)
854 {
855 while (true)
856 {
857 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
858 if (!bb)
859 return NULL;
860 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
861 if (!bi->param_aa_statuses.is_empty ()
862 && bi->param_aa_statuses[index].valid)
863 return &bi->param_aa_statuses[index];
864 }
865 }
866
867 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
868 structures and/or initialize the result with a dominating description as
869 necessary. */
870
871 static struct param_aa_status *
872 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
873 int index)
874 {
875 gcc_checking_assert (fbi);
876 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
877 if (bi->param_aa_statuses.is_empty ())
878 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
879 struct param_aa_status *paa = &bi->param_aa_statuses[index];
880 if (!paa->valid)
881 {
882 gcc_checking_assert (!paa->parm_modified
883 && !paa->ref_modified
884 && !paa->pt_modified);
885 struct param_aa_status *dom_paa;
886 dom_paa = find_dominating_aa_status (fbi, bb, index);
887 if (dom_paa)
888 *paa = *dom_paa;
889 else
890 paa->valid = true;
891 }
892
893 return paa;
894 }
895
896 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
897 a value known not to be modified in this function before reaching the
898 statement STMT. FBI holds information about the function that we have
899 gathered so far but that does not survive the summary building stage. */
900
901 static bool
902 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
903 gimple stmt, tree parm_load)
904 {
905 struct param_aa_status *paa;
906 bool modified = false;
907 ao_ref refd;
908
909 /* FIXME: FBI can be NULL if we are being called from outside
910 ipa_node_analysis or ipcp_transform_function, which currently happens
911 during inlining analysis. It would be great to extend fbi's lifetime and
912 always have it. Currently, we are just not afraid of too much walking in
913 that case. */
914 if (fbi)
915 {
916 if (aa_overwalked (fbi))
917 return false;
918 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
919 if (paa->parm_modified)
920 return false;
921 }
922 else
923 paa = NULL;
924
925 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
926 ao_ref_init (&refd, parm_load);
927 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
928 &modified, NULL);
929 if (fbi)
930 fbi->aa_walked += walked;
931 if (paa && modified)
932 paa->parm_modified = true;
933 return !modified;
934 }
935
936 /* If STMT is an assignment that loads a value from a parameter declaration
937 that has not been modified, return the index of that parameter in
938 ipa_node_params. Otherwise return -1. */
939
940 static int
941 load_from_unmodified_param (struct func_body_info *fbi,
942 vec<ipa_param_descriptor> descriptors,
943 gimple stmt)
944 {
945 int index;
946 tree op1;
947
948 if (!gimple_assign_single_p (stmt))
949 return -1;
950
951 op1 = gimple_assign_rhs1 (stmt);
952 if (TREE_CODE (op1) != PARM_DECL)
953 return -1;
954
955 index = ipa_get_param_decl_index_1 (descriptors, op1);
956 if (index < 0
957 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
958 return -1;
959
960 return index;
961 }
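
/* Such a load is a statement like (a GIMPLE sketch)

     a.0_2 = a;

   where a is a PARM_DECL that is not a gimple register, typically because
   it is addressable. */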
962
963 /* Return true if memory reference REF (which must be a load through parameter
964 with INDEX) loads data that are known to be unmodified in this function
965 before reaching statement STMT. */
966
967 static bool
968 parm_ref_data_preserved_p (struct func_body_info *fbi,
969 int index, gimple stmt, tree ref)
970 {
971 struct param_aa_status *paa;
972 bool modified = false;
973 ao_ref refd;
974
975 /* FIXME: FBI can be NULL if we are being called from outside
976 ipa_node_analysis or ipcp_transform_function, which currently happens
977 during inlining analysis. It would be great to extend fbi's lifetime and
978 always have it. Currently, we are just not afraid of too much walking in
979 that case. */
980 if (fbi)
981 {
982 if (aa_overwalked (fbi))
983 return false;
984 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
985 if (paa->ref_modified)
986 return false;
987 }
988 else
989 paa = NULL;
990
991 gcc_checking_assert (gimple_vuse (stmt));
992 ao_ref_init (&refd, ref);
993 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
994 &modified, NULL);
995 if (fbi)
996 fbi->aa_walked += walked;
997 if (paa && modified)
998 paa->ref_modified = true;
999 return !modified;
1000 }
1001
1002 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1003 is known to be unmodified in this function before reaching call statement
1004 CALL into which it is passed. FBI describes the function body. */
1005
1006 static bool
1007 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
1008 gimple call, tree parm)
1009 {
1010 bool modified = false;
1011 ao_ref refd;
1012
1013 /* It's unnecessary to calculate anything about memory contents for a const
1014 function because it is not going to use it. But do not cache the result
1015 either. Also, no such calculations for non-pointers. */
1016 if (!gimple_vuse (call)
1017 || !POINTER_TYPE_P (TREE_TYPE (parm))
1018 || aa_overwalked (fbi))
1019 return false;
1020
1021 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1022 index);
1023 if (paa->pt_modified)
1024 return false;
1025
1026 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1027 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1028 &modified, NULL);
1029 fbi->aa_walked += walked;
1030 if (modified)
1031 paa->pt_modified = true;
1032 return !modified;
1033 }
1034
1035 /* Return true if we can prove that OP is a memory reference loading unmodified
1036 data from an aggregate passed as a parameter and if the aggregate is passed
1037 by reference, that the alias type of the load corresponds to the type of the
1038 formal parameter (so that we can rely on this type for TBAA in callers).
1039 FBI and DESCRIPTORS describe parameters of the current function (but the
1040 former can be NULL), STMT is the load statement. If the function returns true,
1041 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1042 within the aggregate and whether it is a load from a value passed by
1043 reference respectively. */
1044
1045 static bool
1046 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1047 vec<ipa_param_descriptor> descriptors,
1048 gimple stmt, tree op, int *index_p,
1049 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1050 bool *by_ref_p)
1051 {
1052 int index;
1053 HOST_WIDE_INT size, max_size;
1054 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1055
1056 if (max_size == -1 || max_size != size || *offset_p < 0)
1057 return false;
1058
1059 if (DECL_P (base))
1060 {
1061 int index = ipa_get_param_decl_index_1 (descriptors, base);
1062 if (index >= 0
1063 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1064 {
1065 *index_p = index;
1066 *by_ref_p = false;
1067 if (size_p)
1068 *size_p = size;
1069 return true;
1070 }
1071 return false;
1072 }
1073
1074 if (TREE_CODE (base) != MEM_REF
1075 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1076 || !integer_zerop (TREE_OPERAND (base, 1)))
1077 return false;
1078
1079 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1080 {
1081 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1082 index = ipa_get_param_decl_index_1 (descriptors, parm);
1083 }
1084 else
1085 {
1086 /* This branch catches situations where a pointer parameter is not a
1087 gimple register, for example:
1088
1089 void hip7(S*) (struct S * p)
1090 {
1091 void (*<T2e4>) (struct S *) D.1867;
1092 struct S * p.1;
1093
1094 <bb 2>:
1095 p.1_1 = p;
1096 D.1867_2 = p.1_1->f;
1097 D.1867_2 ();
1098 gdp = &p;
1099 */
1100
1101 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1102 index = load_from_unmodified_param (fbi, descriptors, def);
1103 }
1104
1105 if (index >= 0
1106 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1107 {
1108 *index_p = index;
1109 *by_ref_p = true;
1110 if (size_p)
1111 *size_p = size;
1112 return true;
1113 }
1114 return false;
1115 }
1116
1117 /* Just like the previous function, just without the FBI pointer, for users
1118 outside of this file. */
1119
1120 bool
1121 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1122 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1123 bool *by_ref_p)
1124 {
1125 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1126 offset_p, NULL, by_ref_p);
1127 }
1128
1129 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1130 of an assignment statement STMT, try to determine whether we are actually
1131 handling any of the following cases and construct an appropriate jump
1132 function into JFUNC if so:
1133
1134 1) The passed value is loaded from a formal parameter which is not a gimple
1135 register (most probably because it is addressable, the value has to be
1136 scalar) and we can guarantee the value has not changed. This case can
1137 therefore be described by a simple pass-through jump function. For example:
1138
1139 foo (int a)
1140 {
1141 int a.0;
1142
1143 a.0_2 = a;
1144 bar (a.0_2);
1145
1146 2) The passed value can be described by a simple arithmetic pass-through
1147 jump function. E.g.
1148
1149 foo (int a)
1150 {
1151 int D.2064;
1152
1153 D.2064_4 = a.1(D) + 4;
1154 bar (D.2064_4);
1155
1156 This case can also occur in combination of the previous one, e.g.:
1157
1158 foo (int a, int z)
1159 {
1160 int a.0;
1161 int D.2064;
1162
1163 a.0_3 = a;
1164 D.2064_4 = a.0_3 + 4;
1165 foo (D.2064_4);
1166
1167 3) The passed value is an address of an object within another one (which
1168 also passed by reference). Such situations are described by an ancestor
1169 jump function and describe situations such as:
1170
1171 B::foo() (struct B * const this)
1172 {
1173 struct A * D.1845;
1174
1175 D.1845_2 = &this_1(D)->D.1748;
1176 A::bar (D.1845_2);
1177
1178 INFO is the structure describing individual parameters that is accessible at
1179 different stages of IPA optimizations. FBI contains the information that is
1180 only needed for intraprocedural analysis. */
1181
1182 static void
1183 compute_complex_assign_jump_func (struct func_body_info *fbi,
1184 struct ipa_node_params *info,
1185 struct ipa_jump_func *jfunc,
1186 gimple call, gimple stmt, tree name,
1187 tree param_type)
1188 {
1189 HOST_WIDE_INT offset, size, max_size;
1190 tree op1, tc_ssa, base, ssa;
1191 int index;
1192
1193 op1 = gimple_assign_rhs1 (stmt);
1194
1195 if (TREE_CODE (op1) == SSA_NAME)
1196 {
1197 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1198 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1199 else
1200 index = load_from_unmodified_param (fbi, info->descriptors,
1201 SSA_NAME_DEF_STMT (op1));
1202 tc_ssa = op1;
1203 }
1204 else
1205 {
1206 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1207 tc_ssa = gimple_assign_lhs (stmt);
1208 }
1209
1210 if (index >= 0)
1211 {
1212 tree op2 = gimple_assign_rhs2 (stmt);
1213
1214 if (op2)
1215 {
1216 if (!is_gimple_ip_invariant (op2)
1217 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1218 && !useless_type_conversion_p (TREE_TYPE (name),
1219 TREE_TYPE (op1))))
1220 return;
1221
1222 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1223 gimple_assign_rhs_code (stmt));
1224 }
1225 else if (gimple_assign_single_p (stmt))
1226 {
1227 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1228 bool type_p = false;
1229
1230 if (param_type && POINTER_TYPE_P (param_type))
1231 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1232 call, jfunc);
1233 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1234 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1235 }
1236 return;
1237 }
1238
1239 if (TREE_CODE (op1) != ADDR_EXPR)
1240 return;
1241 op1 = TREE_OPERAND (op1, 0);
1242 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1243 return;
1244 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1245 if (TREE_CODE (base) != MEM_REF
1246 /* If this is a varying address, punt. */
1247 || max_size == -1
1248 || max_size != size)
1249 return;
1250 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1251 ssa = TREE_OPERAND (base, 0);
1252 if (TREE_CODE (ssa) != SSA_NAME
1253 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1254 || offset < 0)
1255 return;
1256
1257 /* Dynamic types are changed in constructors and destructors. */
1258 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1259 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1260 {
1261 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1262 call, jfunc, offset);
1263 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1264 ipa_set_ancestor_jf (jfunc, offset,
1265 type_p ? TREE_TYPE (param_type) : NULL, index,
1266 parm_ref_data_pass_through_p (fbi, index,
1267 call, ssa), type_p);
1268 }
1269 }
1270
1271 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1272 it looks like:
1273
1274 iftmp.1_3 = &obj_2(D)->D.1762;
1275
1276 The base of the MEM_REF must be a default definition SSA NAME of a
1277 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1278 whole MEM_REF expression is returned and the offset calculated from any
1279 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1280 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1281
1282 static tree
1283 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1284 {
1285 HOST_WIDE_INT size, max_size;
1286 tree expr, parm, obj;
1287
1288 if (!gimple_assign_single_p (assign))
1289 return NULL_TREE;
1290 expr = gimple_assign_rhs1 (assign);
1291
1292 if (TREE_CODE (expr) != ADDR_EXPR)
1293 return NULL_TREE;
1294 expr = TREE_OPERAND (expr, 0);
1295 obj = expr;
1296 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1297
1298 if (TREE_CODE (expr) != MEM_REF
1299 /* If this is a varying address, punt. */
1300 || max_size == -1
1301 || max_size != size
1302 || *offset < 0)
1303 return NULL_TREE;
1304 parm = TREE_OPERAND (expr, 0);
1305 if (TREE_CODE (parm) != SSA_NAME
1306 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1307 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1308 return NULL_TREE;
1309
1310 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1311 *obj_p = obj;
1312 return expr;
1313 }
1314
1315
1316 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1317 statement PHI, try to find out whether NAME is in fact a
1318 multiple-inheritance typecast from a descendant into an ancestor of a formal
1319 parameter and thus can be described by an ancestor jump function and if so,
1320 write the appropriate function into JFUNC.
1321
1322 Essentially we want to match the following pattern:
1323
1324 if (obj_2(D) != 0B)
1325 goto <bb 3>;
1326 else
1327 goto <bb 4>;
1328
1329 <bb 3>:
1330 iftmp.1_3 = &obj_2(D)->D.1762;
1331
1332 <bb 4>:
1333 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1334 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1335 return D.1879_6; */
1336
1337 static void
1338 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1339 struct ipa_node_params *info,
1340 struct ipa_jump_func *jfunc,
1341 gimple call, gimple phi, tree param_type)
1342 {
1343 HOST_WIDE_INT offset;
1344 gimple assign, cond;
1345 basic_block phi_bb, assign_bb, cond_bb;
1346 tree tmp, parm, expr, obj;
1347 int index, i;
1348
1349 if (gimple_phi_num_args (phi) != 2)
1350 return;
1351
1352 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1353 tmp = PHI_ARG_DEF (phi, 0);
1354 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1355 tmp = PHI_ARG_DEF (phi, 1);
1356 else
1357 return;
1358 if (TREE_CODE (tmp) != SSA_NAME
1359 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1360 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1361 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1362 return;
1363
1364 assign = SSA_NAME_DEF_STMT (tmp);
1365 assign_bb = gimple_bb (assign);
1366 if (!single_pred_p (assign_bb))
1367 return;
1368 expr = get_ancestor_addr_info (assign, &obj, &offset);
1369 if (!expr)
1370 return;
1371 parm = TREE_OPERAND (expr, 0);
1372 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1373 if (index < 0)
1374 return;
1375
1376 cond_bb = single_pred (assign_bb);
1377 cond = last_stmt (cond_bb);
1378 if (!cond
1379 || gimple_code (cond) != GIMPLE_COND
1380 || gimple_cond_code (cond) != NE_EXPR
1381 || gimple_cond_lhs (cond) != parm
1382 || !integer_zerop (gimple_cond_rhs (cond)))
1383 return;
1384
1385 phi_bb = gimple_bb (phi);
1386 for (i = 0; i < 2; i++)
1387 {
1388 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1389 if (pred != assign_bb && pred != cond_bb)
1390 return;
1391 }
1392
1393 bool type_p = false;
1394 if (param_type && POINTER_TYPE_P (param_type))
1395 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1396 call, jfunc, offset);
1397 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1398 ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
1399 index,
1400 parm_ref_data_pass_through_p (fbi, index, call, parm),
1401 type_p);
1402 }
1403
1404 /* Given OP which is passed as an actual argument to a called function,
1405 determine if it is possible to construct a KNOWN_TYPE jump function for it
1406 and if so, create one and store it to JFUNC.
1407 EXPECTED_TYPE represents a type the argument should be in */
1408
1409 static void
1410 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1411 gimple call, tree expected_type)
1412 {
1413 HOST_WIDE_INT offset, size, max_size;
1414 tree base;
1415
1416 if (!flag_devirtualize
1417 || TREE_CODE (op) != ADDR_EXPR
1418 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1419 /* Be sure expected_type is polymorphic. */
1420 || !expected_type
1421 || TREE_CODE (expected_type) != RECORD_TYPE
1422 || !TYPE_BINFO (TYPE_MAIN_VARIANT (expected_type))
1423 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (expected_type))))
1424 return;
1425
1426 op = TREE_OPERAND (op, 0);
1427 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1428 if (!DECL_P (base)
1429 || max_size == -1
1430 || max_size != size
1431 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1432 || is_global_var (base))
1433 return;
1434
1435 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1436 return;
1437
1438 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1439 expected_type);
1440 }
1441
1442 /* Inspect the given TYPE and return true iff it has the same structure (the
1443 same number of fields of the same types) as a C++ member pointer. If
1444 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1445 corresponding fields there. */
1446
1447 static bool
1448 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1449 {
1450 tree fld;
1451
1452 if (TREE_CODE (type) != RECORD_TYPE)
1453 return false;
1454
1455 fld = TYPE_FIELDS (type);
1456 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1457 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1458 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1459 return false;
1460
1461 if (method_ptr)
1462 *method_ptr = fld;
1463
1464 fld = DECL_CHAIN (fld);
1465 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1466 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1467 return false;
1468 if (delta)
1469 *delta = fld;
1470
1471 if (DECL_CHAIN (fld))
1472 return false;
1473
1474 return true;
1475 }
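
/* A C++ member function pointer is typically lowered into a record of the
   following shape (a hypothetical sketch):

     struct
     {
       void (*__pfn) (struct S *);  <- pointer to METHOD_TYPE
       long __delta;                <- integral this-pointer adjustment
     };

   which is exactly the shape the predicate above checks for. */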
1476
1477 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1478 return the rhs of its defining statement. Otherwise return RHS as it
1479 is. */
1480
1481 static inline tree
1482 get_ssa_def_if_simple_copy (tree rhs)
1483 {
1484 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1485 {
1486 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1487
1488 if (gimple_assign_single_p (def_stmt))
1489 rhs = gimple_assign_rhs1 (def_stmt);
1490 else
1491 break;
1492 }
1493 return rhs;
1494 }
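
/* E.g. given (a GIMPLE sketch)

     b_2 = a_1;
     c_3 = b_2;

   calling this function on c_3 returns a_1; on a default definition or the
   result of a non-copy statement it simply returns its argument. */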
1495
1496 /* Simple linked list, describing known contents of an aggregate before a
1497 call. */
1498
1499 struct ipa_known_agg_contents_list
1500 {
1501 /* Offset and size of the described part of the aggregate. */
1502 HOST_WIDE_INT offset, size;
1503 /* Known constant value or NULL if the contents are known to be non-constant. */
1504 tree constant;
1505 /* Pointer to the next structure in the list. */
1506 struct ipa_known_agg_contents_list *next;
1507 };
1508
1509 /* Find the proper place in linked list of ipa_known_agg_contents_list
1510 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1511 unless there is a partial overlap, in which case return NULL, or such
1512 element is already there, in which case set *ALREADY_THERE to true. */
1513
1514 static struct ipa_known_agg_contents_list **
1515 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1516 HOST_WIDE_INT lhs_offset,
1517 HOST_WIDE_INT lhs_size,
1518 bool *already_there)
1519 {
1520 struct ipa_known_agg_contents_list **p = list;
1521 while (*p && (*p)->offset < lhs_offset)
1522 {
1523 if ((*p)->offset + (*p)->size > lhs_offset)
1524 return NULL;
1525 p = &(*p)->next;
1526 }
1527
1528 if (*p && (*p)->offset < lhs_offset + lhs_size)
1529 {
1530 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1531 /* We already know this value is subsequently overwritten with
1532 something else. */
1533 *already_there = true;
1534 else
1535 /* Otherwise this is a partial overlap which we cannot
1536 represent. */
1537 return NULL;
1538 }
1539 return p;
1540 }
1541
1542 /* Build aggregate jump function from LIST, assuming there are exactly
1543 CONST_COUNT constant entries there and that the offset of the passed argument
1544 is ARG_OFFSET and store it into JFUNC. */
1545
1546 static void
1547 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1548 int const_count, HOST_WIDE_INT arg_offset,
1549 struct ipa_jump_func *jfunc)
1550 {
1551 vec_alloc (jfunc->agg.items, const_count);
1552 while (list)
1553 {
1554 if (list->constant)
1555 {
1556 struct ipa_agg_jf_item item;
1557 item.offset = list->offset - arg_offset;
1558 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1559 item.value = unshare_expr_without_location (list->constant);
1560 jfunc->agg.items->quick_push (item);
1561 }
1562 list = list->next;
1563 }
1564 }
1565
1566 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1567 in ARG is filled in with constant values. ARG can either be an aggregate
1568 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1569 aggregate. JFUNC is the jump function into which the constants are
1570 subsequently stored. */
1571
1572 static void
1573 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1574 struct ipa_jump_func *jfunc)
1575 {
1576 struct ipa_known_agg_contents_list *list = NULL;
1577 int item_count = 0, const_count = 0;
1578 HOST_WIDE_INT arg_offset, arg_size;
1579 gimple_stmt_iterator gsi;
1580 tree arg_base;
1581 bool check_ref, by_ref;
1582 ao_ref r;
1583
1584 /* The function operates in three stages. First, we prepare check_ref, r,
1585 arg_base and arg_offset based on what is passed as the actual
1586 argument. */
1587
1588 if (POINTER_TYPE_P (arg_type))
1589 {
1590 by_ref = true;
1591 if (TREE_CODE (arg) == SSA_NAME)
1592 {
1593 tree type_size;
1594 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1595 return;
1596 check_ref = true;
1597 arg_base = arg;
1598 arg_offset = 0;
1599 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1600 arg_size = tree_to_uhwi (type_size);
1601 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1602 }
1603 else if (TREE_CODE (arg) == ADDR_EXPR)
1604 {
1605 HOST_WIDE_INT arg_max_size;
1606
1607 arg = TREE_OPERAND (arg, 0);
1608 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1609 &arg_max_size);
1610 if (arg_max_size == -1
1611 || arg_max_size != arg_size
1612 || arg_offset < 0)
1613 return;
1614 if (DECL_P (arg_base))
1615 {
1616 check_ref = false;
1617 ao_ref_init (&r, arg_base);
1618 }
1619 else
1620 return;
1621 }
1622 else
1623 return;
1624 }
1625 else
1626 {
1627 HOST_WIDE_INT arg_max_size;
1628
1629 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1630
1631 by_ref = false;
1632 check_ref = false;
1633 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1634 &arg_max_size);
1635 if (arg_max_size == -1
1636 || arg_max_size != arg_size
1637 || arg_offset < 0)
1638 return;
1639
1640 ao_ref_init (&r, arg);
1641 }
1642
1643 /* Second stage walks back the BB, looks at individual statements and as long
1644 as it is confident of how the statements affect contents of the
1645 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1646 describing it. */
1647 gsi = gsi_for_stmt (call);
1648 gsi_prev (&gsi);
1649 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1650 {
1651 struct ipa_known_agg_contents_list *n, **p;
1652 gimple stmt = gsi_stmt (gsi);
1653 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1654 tree lhs, rhs, lhs_base;
1655
1656 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1657 continue;
1658 if (!gimple_assign_single_p (stmt))
1659 break;
1660
1661 lhs = gimple_assign_lhs (stmt);
1662 rhs = gimple_assign_rhs1 (stmt);
1663 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1664 || TREE_CODE (lhs) == BIT_FIELD_REF
1665 || contains_bitfld_component_ref_p (lhs))
1666 break;
1667
1668 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1669 &lhs_max_size);
1670 if (lhs_max_size == -1
1671 || lhs_max_size != lhs_size)
1672 break;
1673
1674 if (check_ref)
1675 {
1676 if (TREE_CODE (lhs_base) != MEM_REF
1677 || TREE_OPERAND (lhs_base, 0) != arg_base
1678 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1679 break;
1680 }
1681 else if (lhs_base != arg_base)
1682 {
1683 if (DECL_P (lhs_base))
1684 continue;
1685 else
1686 break;
1687 }
1688
1689 bool already_there = false;
1690 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1691 &already_there);
1692 if (!p)
1693 break;
1694 if (already_there)
1695 continue;
1696
1697 rhs = get_ssa_def_if_simple_copy (rhs);
1698 n = XALLOCA (struct ipa_known_agg_contents_list);
1699 n->size = lhs_size;
1700 n->offset = lhs_offset;
1701 if (is_gimple_ip_invariant (rhs))
1702 {
1703 n->constant = rhs;
1704 const_count++;
1705 }
1706 else
1707 n->constant = NULL_TREE;
1708 n->next = *p;
1709 *p = n;
1710
1711 item_count++;
1712 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1713 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1714 break;
1715 }
1716
1717 /* Third stage just goes over the list and creates an appropriate vector of
1718 ipa_agg_jf_item structures out of it, of course only if there are
1719 any known constants to begin with. */
1720
1721 if (const_count)
1722 {
1723 jfunc->agg.by_ref = by_ref;
1724 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1725 }
1726 }
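
/* For example (a hypothetical sketch), for

     struct S s;
     s.a = 1;
     s.b = 2;
     use (&s);

   the backward walk above finds the two stores and records the constants 1
   and 2 together with their offsets within s in the aggregate jump function
   of the argument. */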
1727
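/* Return the type of the Ith formal parameter of the function represented by
   call graph edge E, determined from the callee declaration or the static
   type of the call, or NULL if it cannot be determined. */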
1728 static tree
1729 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1730 {
1731 int n;
1732 tree type = (e->callee
1733 ? TREE_TYPE (e->callee->decl)
1734 : gimple_call_fntype (e->call_stmt));
1735 tree t = TYPE_ARG_TYPES (type);
1736
1737 for (n = 0; n < i; n++)
1738 {
1739 if (!t)
1740 break;
1741 t = TREE_CHAIN (t);
1742 }
1743 if (t)
1744 return TREE_VALUE (t);
1745 if (!e->callee)
1746 return NULL;
1747 t = DECL_ARGUMENTS (e->callee->decl);
1748 for (n = 0; n < i; n++)
1749 {
1750 if (!t)
1751 return NULL;
1752 t = TREE_CHAIN (t);
1753 }
1754 if (t)
1755 return TREE_TYPE (t);
1756 return NULL;
1757 }
1758
1759 /* Compute jump function for all arguments of callsite CS and insert the
1760 information in the jump_functions array in the ipa_edge_args corresponding
1761 to this callsite. */
1762
1763 static void
1764 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1765 struct cgraph_edge *cs)
1766 {
1767 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1768 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1769 gimple call = cs->call_stmt;
1770 int n, arg_num = gimple_call_num_args (call);
1771
1772 if (arg_num == 0 || args->jump_functions)
1773 return;
1774 vec_safe_grow_cleared (args->jump_functions, arg_num);
1775
1776 if (gimple_call_internal_p (call))
1777 return;
1778 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1779 return;
1780
1781 for (n = 0; n < arg_num; n++)
1782 {
1783 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1784 tree arg = gimple_call_arg (call, n);
1785 tree param_type = ipa_get_callee_param_type (cs, n);
1786
1787 if (is_gimple_ip_invariant (arg))
1788 ipa_set_jf_constant (jfunc, arg, cs);
1789 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1790 && TREE_CODE (arg) == PARM_DECL)
1791 {
1792 int index = ipa_get_param_decl_index (info, arg);
1793
1794 gcc_assert (index >= 0);
1795 /* Aggregate passed by value, check for pass-through, otherwise we
1796 will attempt to fill in aggregate contents later in this
1797 for loop. */
1798 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1799 {
1800 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1801 continue;
1802 }
1803 }
1804 else if (TREE_CODE (arg) == SSA_NAME)
1805 {
1806 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1807 {
1808 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1809 if (index >= 0)
1810 {
1811 bool agg_p, type_p;
1812 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1813 if (param_type && POINTER_TYPE_P (param_type))
1814 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1815 call, jfunc);
1816 else
1817 type_p = false;
1818 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1819 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1820 type_p);
1821 }
1822 }
1823 else
1824 {
1825 gimple stmt = SSA_NAME_DEF_STMT (arg);
1826 if (is_gimple_assign (stmt))
1827 compute_complex_assign_jump_func (fbi, info, jfunc,
1828 call, stmt, arg, param_type);
1829 else if (gimple_code (stmt) == GIMPLE_PHI)
1830 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1831 call, stmt, param_type);
1832 }
1833 }
1834 else
1835 compute_known_type_jump_func (arg, jfunc, call,
1836 param_type
1837 && POINTER_TYPE_P (param_type)
1838 ? TREE_TYPE (param_type)
1839 : NULL);
1840
1841 /* If ARG is a pointer, we cannot use its type to determine the type of the
1842 aggregate passed (because type conversions are ignored in gimple). Usually we
1843 can safely get the type from the function declaration, but with K&R prototypes
1844 or variadic functions we can try our luck with the type of the pointer passed.
1845 TODO: Since we look for actual initialization of the memory object, we may do
1846 better to work out the type from the memory stores we find. */
1847 if (!param_type)
1848 param_type = TREE_TYPE (arg);
1849
1850 if ((jfunc->type != IPA_JF_PASS_THROUGH
1851 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1852 && (jfunc->type != IPA_JF_ANCESTOR
1853 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1854 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1855 || POINTER_TYPE_P (param_type)))
1856 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1857 }
1858 }
1859
1860 /* Compute jump functions for all edges - both direct and indirect - outgoing
1861 from BB. */
1862
1863 static void
1864 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1865 {
1866 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1867 int i;
1868 struct cgraph_edge *cs;
1869
1870 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1871 {
1872 struct cgraph_node *callee = cs->callee;
1873
1874 if (callee)
1875 {
1876 cgraph_function_or_thunk_node (callee, NULL);
1877 /* We do not need to bother analyzing calls to unknown functions
1878 unless they may become known during lto/whopr. */
1879 if (!callee->definition && !flag_lto)
1880 continue;
1881 }
1882 ipa_compute_jump_functions_for_edge (fbi, cs);
1883 }
1884 }
1885
1886 /* If STMT looks like a statement loading a value from a member pointer formal
1887 parameter, return that parameter and store the offset of the field to
1888 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1889 might be clobbered). If USE_DELTA, then we look for a use of the delta
1890 field rather than the pfn. */
1891
1892 static tree
1893 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1894 HOST_WIDE_INT *offset_p)
1895 {
1896 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1897
1898 if (!gimple_assign_single_p (stmt))
1899 return NULL_TREE;
1900
1901 rhs = gimple_assign_rhs1 (stmt);
1902 if (TREE_CODE (rhs) == COMPONENT_REF)
1903 {
1904 ref_field = TREE_OPERAND (rhs, 1);
1905 rhs = TREE_OPERAND (rhs, 0);
1906 }
1907 else
1908 ref_field = NULL_TREE;
1909 if (TREE_CODE (rhs) != MEM_REF)
1910 return NULL_TREE;
1911 rec = TREE_OPERAND (rhs, 0);
1912 if (TREE_CODE (rec) != ADDR_EXPR)
1913 return NULL_TREE;
1914 rec = TREE_OPERAND (rec, 0);
1915 if (TREE_CODE (rec) != PARM_DECL
1916 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1917 return NULL_TREE;
1918 ref_offset = TREE_OPERAND (rhs, 1);
1919
1920 if (use_delta)
1921 fld = delta_field;
1922 else
1923 fld = ptr_field;
1924 if (offset_p)
1925 *offset_p = int_bit_position (fld);
1926
1927 if (ref_field)
1928 {
1929 if (integer_nonzerop (ref_offset))
1930 return NULL_TREE;
1931 return ref_field == fld ? rec : NULL_TREE;
1932 }
1933 else
1934 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1935 : NULL_TREE;
1936 }
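/* A sketch of what the matcher above accepts: with F a member-pointer
   PARM_DECL, the load must be a MEM_REF based on &f, either wrapped in a
   COMPONENT_REF that names the expected field (in which case the MEM_REF
   offset must be zero), or bare, with an offset equal to the field's byte
   position, e.g. (hypothetical gimple, cf. the larger example below):

     f$__pfn_24 = MEM[(struct *)&f + 4B];

   assuming the pfn field lives at byte 4 of the member-pointer record.  */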
1937
1938 /* Returns true iff T is an SSA_NAME defined by a statement. */
1939
1940 static bool
1941 ipa_is_ssa_with_stmt_def (tree t)
1942 {
1943 if (TREE_CODE (t) == SSA_NAME
1944 && !SSA_NAME_IS_DEFAULT_DEF (t))
1945 return true;
1946 else
1947 return false;
1948 }
1949
1950 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1951 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1952 indirect call graph edge. */
1953
1954 static struct cgraph_edge *
1955 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1956 {
1957 struct cgraph_edge *cs;
1958
1959 cs = cgraph_edge (node, stmt);
1960 cs->indirect_info->param_index = param_index;
1961 cs->indirect_info->agg_contents = 0;
1962 cs->indirect_info->member_ptr = 0;
1963 return cs;
1964 }
1965
1966 /* Analyze the CALL and examine uses of formal parameters of the caller
1967 FBI->node (described by FBI->info), which also holds intermediate
1968 information about each formal parameter. Currently it checks
1969 whether the call calls a pointer that is a formal parameter and if so, the
1970 parameter is marked with the called flag and an indirect call graph edge
1971 describing the call is created. This is very simple for ordinary pointers
1972 represented in SSA but not-so-nice when it comes to member pointers. The
1973 ugly part of this function does nothing more than trying to match the
1974 pattern of such a call. An example of such a pattern is the gimple dump
1975 below, the call is on the last line:
1976
1977 <bb 2>:
1978 f$__delta_5 = f.__delta;
1979 f$__pfn_24 = f.__pfn;
1980
1981 or
1982 <bb 2>:
1983 f$__delta_5 = MEM[(struct *)&f];
1984 f$__pfn_24 = MEM[(struct *)&f + 4B];
1985
1986 and a few lines below:
1987
1988 <bb 5>
1989 D.2496_3 = (int) f$__pfn_24;
1990 D.2497_4 = D.2496_3 & 1;
1991 if (D.2497_4 != 0)
1992 goto <bb 3>;
1993 else
1994 goto <bb 4>;
1995
1996 <bb 6>:
1997 D.2500_7 = (unsigned int) f$__delta_5;
1998 D.2501_8 = &S + D.2500_7;
1999 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2000 D.2503_10 = *D.2502_9;
2001 D.2504_12 = f$__pfn_24 + -1;
2002 D.2505_13 = (unsigned int) D.2504_12;
2003 D.2506_14 = D.2503_10 + D.2505_13;
2004 D.2507_15 = *D.2506_14;
2005 iftmp.11_16 = (String:: *) D.2507_15;
2006
2007 <bb 7>:
2008 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2009 D.2500_19 = (unsigned int) f$__delta_5;
2010 D.2508_20 = &S + D.2500_19;
2011 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2012
2013 Such patterns are the result of simple calls to a member pointer:
2014
2015 int doprinting (int (MyString::* f)(int) const)
2016 {
2017 MyString S ("somestring");
2018
2019 return (S.*f)(4);
2020 }
2021
2022 Moreover, the function also looks for called pointers loaded from aggregates
2023 passed by value or reference. */
2024
2025 static void
2026 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2027 tree target)
2028 {
2029 struct ipa_node_params *info = fbi->info;
2030 HOST_WIDE_INT offset;
2031 bool by_ref;
2032
2033 if (SSA_NAME_IS_DEFAULT_DEF (target))
2034 {
2035 tree var = SSA_NAME_VAR (target);
2036 int index = ipa_get_param_decl_index (info, var);
2037 if (index >= 0)
2038 ipa_note_param_call (fbi->node, index, call);
2039 return;
2040 }
2041
2042 int index;
2043 gimple def = SSA_NAME_DEF_STMT (target);
2044 if (gimple_assign_single_p (def)
2045 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2046 gimple_assign_rhs1 (def), &index, &offset,
2047 NULL, &by_ref))
2048 {
2049 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2050 if (cs->indirect_info->offset != offset)
2051 cs->indirect_info->outer_type = NULL;
2052 cs->indirect_info->offset = offset;
2053 cs->indirect_info->agg_contents = 1;
2054 cs->indirect_info->by_ref = by_ref;
2055 return;
2056 }
2057
2058 /* Now we need to try to match the complex pattern of calling a member
2059 pointer. */
2060 if (gimple_code (def) != GIMPLE_PHI
2061 || gimple_phi_num_args (def) != 2
2062 || !POINTER_TYPE_P (TREE_TYPE (target))
2063 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2064 return;
2065
2066 /* First, we need to check whether one of these is a load from a member
2067 pointer that is a parameter to this function. */
2068 tree n1 = PHI_ARG_DEF (def, 0);
2069 tree n2 = PHI_ARG_DEF (def, 1);
2070 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2071 return;
2072 gimple d1 = SSA_NAME_DEF_STMT (n1);
2073 gimple d2 = SSA_NAME_DEF_STMT (n2);
2074
2075 tree rec;
2076 basic_block bb, virt_bb;
2077 basic_block join = gimple_bb (def);
2078 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2079 {
2080 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2081 return;
2082
2083 bb = EDGE_PRED (join, 0)->src;
2084 virt_bb = gimple_bb (d2);
2085 }
2086 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2087 {
2088 bb = EDGE_PRED (join, 1)->src;
2089 virt_bb = gimple_bb (d1);
2090 }
2091 else
2092 return;
2093
2094 /* Second, we need to check that the basic blocks are laid out in the way
2095 corresponding to the pattern. */
2096
2097 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2098 || single_pred (virt_bb) != bb
2099 || single_succ (virt_bb) != join)
2100 return;
2101
2102 /* Third, let's see that the branching is done depending on the least
2103 significant bit of the pfn. */
2104
2105 gimple branch = last_stmt (bb);
2106 if (!branch || gimple_code (branch) != GIMPLE_COND)
2107 return;
2108
2109 if ((gimple_cond_code (branch) != NE_EXPR
2110 && gimple_cond_code (branch) != EQ_EXPR)
2111 || !integer_zerop (gimple_cond_rhs (branch)))
2112 return;
2113
2114 tree cond = gimple_cond_lhs (branch);
2115 if (!ipa_is_ssa_with_stmt_def (cond))
2116 return;
2117
2118 def = SSA_NAME_DEF_STMT (cond);
2119 if (!is_gimple_assign (def)
2120 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2121 || !integer_onep (gimple_assign_rhs2 (def)))
2122 return;
2123
2124 cond = gimple_assign_rhs1 (def);
2125 if (!ipa_is_ssa_with_stmt_def (cond))
2126 return;
2127
2128 def = SSA_NAME_DEF_STMT (cond);
2129
2130 if (is_gimple_assign (def)
2131 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2132 {
2133 cond = gimple_assign_rhs1 (def);
2134 if (!ipa_is_ssa_with_stmt_def (cond))
2135 return;
2136 def = SSA_NAME_DEF_STMT (cond);
2137 }
2138
2139 tree rec2;
2140 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2141 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2142 == ptrmemfunc_vbit_in_delta),
2143 NULL);
2144 if (rec != rec2)
2145 return;
2146
2147 index = ipa_get_param_decl_index (info, rec);
2148 if (index >= 0
2149 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2150 {
2151 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2152 if (cs->indirect_info->offset != offset)
2153 cs->indirect_info->outer_type = NULL;
2154 cs->indirect_info->offset = offset;
2155 cs->indirect_info->agg_contents = 1;
2156 cs->indirect_info->member_ptr = 1;
2157 }
2158
2159 return;
2160 }
2161
2162 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2163 object referenced in the expression is a formal parameter of the caller
2164 FBI->node (described by FBI->info), create a call note for the
2165 statement. */
2166
2167 static void
2168 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2169 gimple call, tree target)
2170 {
2171 tree obj = OBJ_TYPE_REF_OBJECT (target);
2172 int index;
2173 HOST_WIDE_INT anc_offset;
2174
2175 if (!flag_devirtualize)
2176 return;
2177
2178 if (TREE_CODE (obj) != SSA_NAME)
2179 return;
2180
2181 struct ipa_node_params *info = fbi->info;
2182 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2183 {
2184 struct ipa_jump_func jfunc;
2185 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2186 return;
2187
2188 anc_offset = 0;
2189 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2190 gcc_assert (index >= 0);
2191 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2192 call, &jfunc))
2193 return;
2194 }
2195 else
2196 {
2197 struct ipa_jump_func jfunc;
2198 gimple stmt = SSA_NAME_DEF_STMT (obj);
2199 tree expr;
2200
2201 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2202 if (!expr)
2203 return;
2204 index = ipa_get_param_decl_index (info,
2205 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2206 gcc_assert (index >= 0);
2207 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2208 call, &jfunc, anc_offset))
2209 return;
2210 }
2211
2212 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2213 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2214 ii->offset = anc_offset;
2215 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2216 ii->otr_type = obj_type_ref_class (target);
2217 ii->polymorphic = 1;
2218 }
2219
2220 /* Analyze a call statement CALL to see whether and how it utilizes formal
2221 parameters of the caller described by FBI->info, which also holds
2222 intermediate information about each formal parameter. */
2223
2224 static void
2225 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2226 {
2227 tree target = gimple_call_fn (call);
2228
2229 if (!target
2230 || (TREE_CODE (target) != SSA_NAME
2231 && !virtual_method_call_p (target)))
2232 return;
2233
2234 /* If we previously turned the call into a direct call, there is
2235 no need to analyze. */
2236 struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
2237 if (cs && !cs->indirect_unknown_callee)
2238 return;
2239 if (TREE_CODE (target) == SSA_NAME)
2240 ipa_analyze_indirect_call_uses (fbi, call, target);
2241 else if (virtual_method_call_p (target))
2242 ipa_analyze_virtual_call_uses (fbi, call, target);
2243 }
2244
2245
2246 /* Analyze the call statement STMT with respect to formal parameters
2247 (described in FBI->info) of the caller given by FBI->node. Currently it
2248 only checks whether formal parameters are called. */
2249
2250 static void
2251 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2252 {
2253 if (is_gimple_call (stmt))
2254 ipa_analyze_call_uses (fbi, stmt);
2255 }
2256
2257 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2258 taken addresses alike. If OP is a parameter declaration, mark it as used
2259 in the info structure passed in DATA. */
2260
2261 static bool
2262 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2263 {
2264 struct ipa_node_params *info = (struct ipa_node_params *) data;
2265
2266 op = get_base_address (op);
2267 if (op
2268 && TREE_CODE (op) == PARM_DECL)
2269 {
2270 int index = ipa_get_param_decl_index (info, op);
2271 gcc_assert (index >= 0);
2272 ipa_set_param_used (info, index, true);
2273 }
2274
2275 return false;
2276 }
2277
2278 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2279 the findings in various structures of the associated ipa_node_params
2280 structure, such as parameter flags, notes etc. FBI holds various data about
2281 the function being analyzed. */
2282
2283 static void
2284 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2285 {
2286 gimple_stmt_iterator gsi;
2287 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2288 {
2289 gimple stmt = gsi_stmt (gsi);
2290
2291 if (is_gimple_debug (stmt))
2292 continue;
2293
2294 ipa_analyze_stmt_uses (fbi, stmt);
2295 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2296 visit_ref_for_mod_analysis,
2297 visit_ref_for_mod_analysis,
2298 visit_ref_for_mod_analysis);
2299 }
2300 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2301 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2302 visit_ref_for_mod_analysis,
2303 visit_ref_for_mod_analysis,
2304 visit_ref_for_mod_analysis);
2305 }
2306
2307 /* Calculate controlled uses of parameters of NODE. */
2308
2309 static void
2310 ipa_analyze_controlled_uses (struct cgraph_node *node)
2311 {
2312 struct ipa_node_params *info = IPA_NODE_REF (node);
2313
2314 for (int i = 0; i < ipa_get_param_count (info); i++)
2315 {
2316 tree parm = ipa_get_param (info, i);
2317 int controlled_uses = 0;
2318
2319 /* For SSA regs see if parameter is used. For non-SSA we compute
2320 the flag during modification analysis. */
2321 if (is_gimple_reg (parm))
2322 {
2323 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2324 parm);
2325 if (ddef && !has_zero_uses (ddef))
2326 {
2327 imm_use_iterator imm_iter;
2328 use_operand_p use_p;
2329
2330 ipa_set_param_used (info, i, true);
2331 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2332 if (!is_gimple_call (USE_STMT (use_p)))
2333 {
2334 if (!is_gimple_debug (USE_STMT (use_p)))
2335 {
2336 controlled_uses = IPA_UNDESCRIBED_USE;
2337 break;
2338 }
2339 }
2340 else
2341 controlled_uses++;
2342 }
2343 else
2344 controlled_uses = 0;
2345 }
2346 else
2347 controlled_uses = IPA_UNDESCRIBED_USE;
2348 ipa_set_controlled_uses (info, i, controlled_uses);
2349 }
2350 }
2351
2352 /* Free stuff in BI. */
2353
2354 static void
2355 free_ipa_bb_info (struct ipa_bb_info *bi)
2356 {
2357 bi->cg_edges.release ();
2358 bi->param_aa_statuses.release ();
2359 }
2360
2361 /* Dominator walker driving the analysis. */
2362
2363 class analysis_dom_walker : public dom_walker
2364 {
2365 public:
2366 analysis_dom_walker (struct func_body_info *fbi)
2367 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2368
2369 virtual void before_dom_children (basic_block);
2370
2371 private:
2372 struct func_body_info *m_fbi;
2373 };
2374
2375 void
2376 analysis_dom_walker::before_dom_children (basic_block bb)
2377 {
2378 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2379 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2380 }
2381
2382 /* Initialize the array describing properties of formal parameters
2383 of NODE, analyze their uses and compute jump functions associated
2384 with actual arguments of calls from within NODE. */
2385
2386 void
2387 ipa_analyze_node (struct cgraph_node *node)
2388 {
2389 struct func_body_info fbi;
2390 struct ipa_node_params *info;
2391
2392 ipa_check_create_node_params ();
2393 ipa_check_create_edge_args ();
2394 info = IPA_NODE_REF (node);
2395
2396 if (info->analysis_done)
2397 return;
2398 info->analysis_done = 1;
2399
2400 if (ipa_func_spec_opts_forbid_analysis_p (node))
2401 {
2402 for (int i = 0; i < ipa_get_param_count (info); i++)
2403 {
2404 ipa_set_param_used (info, i, true);
2405 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2406 }
2407 return;
2408 }
2409
2410 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2411 push_cfun (func);
2412 calculate_dominance_info (CDI_DOMINATORS);
2413 ipa_initialize_node_params (node);
2414 ipa_analyze_controlled_uses (node);
2415
2416 fbi.node = node;
2417 fbi.info = IPA_NODE_REF (node);
2418 fbi.bb_infos = vNULL;
2419 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2420 fbi.param_count = ipa_get_param_count (info);
2421 fbi.aa_walked = 0;
2422
2423 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2424 {
2425 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2426 bi->cg_edges.safe_push (cs);
2427 }
2428
2429 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2430 {
2431 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2432 bi->cg_edges.safe_push (cs);
2433 }
2434
2435 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2436
2437 int i;
2438 struct ipa_bb_info *bi;
2439 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2440 free_ipa_bb_info (bi);
2441 fbi.bb_infos.release ();
2442 free_dominance_info (CDI_DOMINATORS);
2443 pop_cfun ();
2444 }
2445
2446 /* Given a statement CALL, which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2447 attempt a type-based devirtualization. If successful, return the
2448 target function declaration, otherwise return NULL. */
2449
2450 tree
2451 ipa_intraprocedural_devirtualization (gimple call)
2452 {
2453 tree binfo, token, fndecl;
2454 struct ipa_jump_func jfunc;
2455 tree otr = gimple_call_fn (call);
2456
2457 jfunc.type = IPA_JF_UNKNOWN;
2458 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2459 call, obj_type_ref_class (otr));
2460 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2461 return NULL_TREE;
2462 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2463 if (!binfo)
2464 return NULL_TREE;
2465 token = OBJ_TYPE_REF_TOKEN (otr);
2466 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2467 binfo);
2468 #ifdef ENABLE_CHECKING
2469 if (fndecl)
2470 gcc_assert (possible_polymorphic_call_target_p
2471 (otr, cgraph_get_node (fndecl)));
2472 #endif
2473 return fndecl;
2474 }
2475
2476 /* Update the jump function DST when the call graph edge corresponding to SRC
2477 is being inlined, knowing that DST is of type ancestor and SRC of known
2478 type. */
2479
2480 static void
2481 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2482 struct ipa_jump_func *dst)
2483 {
2484 HOST_WIDE_INT combined_offset;
2485 tree combined_type;
2486
2487 if (!ipa_get_jf_ancestor_type_preserved (dst))
2488 {
2489 dst->type = IPA_JF_UNKNOWN;
2490 return;
2491 }
2492
2493 combined_offset = ipa_get_jf_known_type_offset (src)
2494 + ipa_get_jf_ancestor_offset (dst);
2495 combined_type = ipa_get_jf_ancestor_type (dst);
2496
2497 ipa_set_jf_known_type (dst, combined_offset,
2498 ipa_get_jf_known_type_base_type (src),
2499 combined_type);
2500 }
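/* A worked example with hypothetical numbers: if SRC records a known type
   with base type D at offset 0 and DST walks to an ancestor of type B at
   offset 64 bits, the combination above records a known B at offset
   0 + 64 == 64 bits within the same base type D.  */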
2501
2502 /* Update the jump functions associated with call graph edge E when the call
2503 graph edge CS is being inlined, assuming that E->caller is already (possibly
2504 indirectly) inlined into CS->callee and that E has not been inlined. */
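/* A sketch of the pass-through composition below: if the jump function of E
   for some argument is a NOP_EXPR pass-through of parameter 1 of the
   function being inlined, and the jump function of CS for argument 1 is
   itself a NOP_EXPR pass-through of parameter 0 of the new root, the two
   compose into a pass-through of parameter 0, with the agg_preserved and
   type_preserved flags of the two functions and-ed together.  */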
2505
2506 static void
2507 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2508 struct cgraph_edge *e)
2509 {
2510 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2511 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2512 int count = ipa_get_cs_argument_count (args);
2513 int i;
2514
2515 for (i = 0; i < count; i++)
2516 {
2517 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2518
2519 if (dst->type == IPA_JF_ANCESTOR)
2520 {
2521 struct ipa_jump_func *src;
2522 int dst_fid = dst->value.ancestor.formal_id;
2523
2524 /* A variable number of arguments can cause havoc if we try to access
2525 an argument that does not exist on the inlined edge. So make sure we
2526 don't. */
2527 if (dst_fid >= ipa_get_cs_argument_count (top))
2528 {
2529 dst->type = IPA_JF_UNKNOWN;
2530 continue;
2531 }
2532
2533 src = ipa_get_ith_jump_func (top, dst_fid);
2534
2535 if (src->agg.items
2536 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2537 {
2538 struct ipa_agg_jf_item *item;
2539 int j;
2540
2541 /* Currently we do not produce clobber aggregate jump functions,
2542 replace with merging when we do. */
2543 gcc_assert (!dst->agg.items);
2544
2545 dst->agg.items = vec_safe_copy (src->agg.items);
2546 dst->agg.by_ref = src->agg.by_ref;
2547 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2548 item->offset -= dst->value.ancestor.offset;
2549 }
2550
2551 if (src->type == IPA_JF_KNOWN_TYPE)
2552 combine_known_type_and_ancestor_jfs (src, dst);
2553 else if (src->type == IPA_JF_PASS_THROUGH
2554 && src->value.pass_through.operation == NOP_EXPR)
2555 {
2556 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2557 dst->value.ancestor.agg_preserved &=
2558 src->value.pass_through.agg_preserved;
2559 dst->value.ancestor.type_preserved &=
2560 src->value.pass_through.type_preserved;
2561 }
2562 else if (src->type == IPA_JF_ANCESTOR)
2563 {
2564 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2565 dst->value.ancestor.offset += src->value.ancestor.offset;
2566 dst->value.ancestor.agg_preserved &=
2567 src->value.ancestor.agg_preserved;
2568 dst->value.ancestor.type_preserved &=
2569 src->value.ancestor.type_preserved;
2570 }
2571 else
2572 dst->type = IPA_JF_UNKNOWN;
2573 }
2574 else if (dst->type == IPA_JF_PASS_THROUGH)
2575 {
2576 struct ipa_jump_func *src;
2577 /* We must check the range due to calls with a variable number of arguments,
2578 and we cannot combine jump functions with operations. */
2579 if (dst->value.pass_through.operation == NOP_EXPR
2580 && (dst->value.pass_through.formal_id
2581 < ipa_get_cs_argument_count (top)))
2582 {
2583 int dst_fid = dst->value.pass_through.formal_id;
2584 src = ipa_get_ith_jump_func (top, dst_fid);
2585 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2586
2587 switch (src->type)
2588 {
2589 case IPA_JF_UNKNOWN:
2590 dst->type = IPA_JF_UNKNOWN;
2591 break;
2592 case IPA_JF_KNOWN_TYPE:
2593 if (ipa_get_jf_pass_through_type_preserved (dst))
2594 ipa_set_jf_known_type (dst,
2595 ipa_get_jf_known_type_offset (src),
2596 ipa_get_jf_known_type_base_type (src),
2597 ipa_get_jf_known_type_component_type (src));
2598 else
2599 dst->type = IPA_JF_UNKNOWN;
2600 break;
2601 case IPA_JF_CONST:
2602 ipa_set_jf_cst_copy (dst, src);
2603 break;
2604
2605 case IPA_JF_PASS_THROUGH:
2606 {
2607 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2608 enum tree_code operation;
2609 operation = ipa_get_jf_pass_through_operation (src);
2610
2611 if (operation == NOP_EXPR)
2612 {
2613 bool agg_p, type_p;
2614 agg_p = dst_agg_p
2615 && ipa_get_jf_pass_through_agg_preserved (src);
2616 type_p = ipa_get_jf_pass_through_type_preserved (src)
2617 && ipa_get_jf_pass_through_type_preserved (dst);
2618 ipa_set_jf_simple_pass_through (dst, formal_id,
2619 agg_p, type_p);
2620 }
2621 else
2622 {
2623 tree operand = ipa_get_jf_pass_through_operand (src);
2624 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2625 operation);
2626 }
2627 break;
2628 }
2629 case IPA_JF_ANCESTOR:
2630 {
2631 bool agg_p, type_p;
2632 agg_p = dst_agg_p
2633 && ipa_get_jf_ancestor_agg_preserved (src);
2634 type_p = ipa_get_jf_ancestor_type_preserved (src)
2635 && ipa_get_jf_pass_through_type_preserved (dst);
2636 ipa_set_ancestor_jf (dst,
2637 ipa_get_jf_ancestor_offset (src),
2638 ipa_get_jf_ancestor_type (src),
2639 ipa_get_jf_ancestor_formal_id (src),
2640 agg_p, type_p);
2641 break;
2642 }
2643 default:
2644 gcc_unreachable ();
2645 }
2646
2647 if (src->agg.items
2648 && (dst_agg_p || !src->agg.by_ref))
2649 {
2650 /* Currently we do not produce clobber aggregate jump
2651 functions, replace with merging when we do. */
2652 gcc_assert (!dst->agg.items);
2653
2654 dst->agg.by_ref = src->agg.by_ref;
2655 dst->agg.items = vec_safe_copy (src->agg.items);
2656 }
2657 }
2658 else
2659 dst->type = IPA_JF_UNKNOWN;
2660 }
2661 }
2662 }
2663
2664 /* If TARGET is an addr_expr of a function declaration, make it the destination
2665 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2666
2667 struct cgraph_edge *
2668 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2669 {
2670 struct cgraph_node *callee;
2671 struct inline_edge_summary *es = inline_edge_summary (ie);
2672 bool unreachable = false;
2673
2674 if (TREE_CODE (target) == ADDR_EXPR)
2675 target = TREE_OPERAND (target, 0);
2676 if (TREE_CODE (target) != FUNCTION_DECL)
2677 {
2678 target = canonicalize_constructor_val (target, NULL);
2679 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2680 {
2681 if (ie->indirect_info->member_ptr)
2682 /* Member pointer call that goes through a VMT lookup. */
2683 return NULL;
2684
2685 if (dump_enabled_p ())
2686 {
2687 location_t loc = gimple_location_safe (ie->call_stmt);
2688 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2689 "discovered direct call to non-function in %s/%i, "
2690 "making it __builtin_unreachable\n",
2691 ie->caller->name (), ie->caller->order);
2692 }
2693
2694 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2695 callee = cgraph_get_create_node (target);
2696 unreachable = true;
2697 }
2698 else
2699 callee = cgraph_get_node (target);
2700 }
2701 else
2702 callee = cgraph_get_node (target);
2703
2704 /* Because may-edges are not explicitly represented and the vtable may be
2705 external, we may create the first reference to the object in the unit. */
2706 if (!callee || callee->global.inlined_to)
2707 {
2708
2709 /* We had better make sure we can refer to it.
2710 In the case of static functions we are out of luck, since we already
2711 removed their bodies. In the case of public functions we may or may
2712 not introduce the reference. */
2713 if (!canonicalize_constructor_val (target, NULL)
2714 || !TREE_PUBLIC (target))
2715 {
2716 if (dump_file)
2717 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2718 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2719 xstrdup (ie->caller->name ()),
2720 ie->caller->order,
2721 xstrdup (ie->callee->name ()),
2722 ie->callee->order);
2723 return NULL;
2724 }
2725 callee = cgraph_get_create_node (target);
2726 }
2727
2728 if (!dbg_cnt (devirt))
2729 return NULL;
2730
2731 ipa_check_create_node_params ();
2732
2733 /* We cannot make edges to inline clones. It is a bug if someone removed
2734 the cgraph node too early. */
2735 gcc_assert (!callee->global.inlined_to);
2736
2737 if (dump_file && !unreachable)
2738 {
2739 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2740 "(%s/%i -> %s/%i), for stmt ",
2741 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2742 xstrdup (ie->caller->name ()),
2743 ie->caller->order,
2744 xstrdup (callee->name ()),
2745 callee->order);
2746 if (ie->call_stmt)
2747 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2748 else
2749 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2750 }
2751 if (dump_enabled_p ())
2752 {
2753 location_t loc = gimple_location_safe (ie->call_stmt);
2754
2755 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2756 "converting indirect call in %s to direct call to %s\n",
2757 ie->caller->name (), callee->name ());
2758 }
2759 ie = cgraph_make_edge_direct (ie, callee);
2760 es = inline_edge_summary (ie);
2761 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2762 - eni_size_weights.call_cost);
2763 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2764 - eni_time_weights.call_cost);
2765
2766 return ie;
2767 }
2768
2769 /* Retrieve a value from aggregate jump function AGG for the given OFFSET, or
2770 return NULL if there is none. BY_REF specifies whether the value has to
2771 be passed by reference or by value. */
2772
2773 tree
2774 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2775 HOST_WIDE_INT offset, bool by_ref)
2776 {
2777 struct ipa_agg_jf_item *item;
2778 int i;
2779
2780 if (by_ref != agg->by_ref)
2781 return NULL;
2782
2783 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2784 if (item->offset == offset)
2785 {
2786 /* Currently we do not have clobber values, return NULL for them once
2787 we do. */
2788 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2789 return item->value;
2790 }
2791 return NULL;
2792 }
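/* For example (hypothetical values): if AGG describes an aggregate passed by
   value whose field at bit offset 64 is known to contain the constant 7,
   then calling this function with (64, false) returns that INTEGER_CST,
   whereas any other offset, or BY_REF being true, yields NULL.  */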
2793
2794 /* Remove a reference to SYMBOL from the list of references of a node given by
2795 reference description RDESC. Return true if the reference has been
2796 successfully found and removed. */
2797
2798 static bool
2799 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2800 {
2801 struct ipa_ref *to_del;
2802 struct cgraph_edge *origin;
2803
2804 origin = rdesc->cs;
2805 if (!origin)
2806 return false;
2807 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2808 origin->lto_stmt_uid);
2809 if (!to_del)
2810 return false;
2811
2812 to_del->remove_reference ();
2813 if (dump_file)
2814 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2815 xstrdup (origin->caller->name ()),
2816 origin->caller->order, xstrdup (symbol->name ()));
2817 return true;
2818 }
2819
2820 /* If JFUNC has a reference description with refcount different from
2821 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2822 NULL. JFUNC must be a constant jump function. */
2823
2824 static struct ipa_cst_ref_desc *
2825 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2826 {
2827 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2828 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2829 return rdesc;
2830 else
2831 return NULL;
2832 }
2833
2834 /* If the value of constant jump function JFUNC is an address of a function
2835 declaration, return the associated call graph node. Otherwise return
2836 NULL. */
2837
2838 static cgraph_node *
2839 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2840 {
2841 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2842 tree cst = ipa_get_jf_constant (jfunc);
2843 if (TREE_CODE (cst) != ADDR_EXPR
2844 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2845 return NULL;
2846
2847 return cgraph_get_node (TREE_OPERAND (cst, 0));
2848 }
2849
2850
2851 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2852 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2853 the edge specified in the rdesc. Return false if either the symbol or the
2854 reference could not be found, otherwise return true. */
2855
2856 static bool
2857 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2858 {
2859 struct ipa_cst_ref_desc *rdesc;
2860 if (jfunc->type == IPA_JF_CONST
2861 && (rdesc = jfunc_rdesc_usable (jfunc))
2862 && --rdesc->refcount == 0)
2863 {
2864 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2865 if (!symbol)
2866 return false;
2867
2868 return remove_described_reference (symbol, rdesc);
2869 }
2870 return true;
2871 }
2872
2873 /* Try to find a destination for indirect edge IE that corresponds to a simple
2874 call or a call of a member function pointer and where the destination is a
2875 pointer formal parameter described by jump function JFUNC. If it can be
2876 determined, return the newly direct edge, otherwise return NULL.
2877 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2878
2879 static struct cgraph_edge *
2880 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2881 struct ipa_jump_func *jfunc,
2882 struct ipa_node_params *new_root_info)
2883 {
2884 struct cgraph_edge *cs;
2885 tree target;
2886 bool agg_contents = ie->indirect_info->agg_contents;
2887
2888 if (ie->indirect_info->agg_contents)
2889 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2890 ie->indirect_info->offset,
2891 ie->indirect_info->by_ref);
2892 else
2893 target = ipa_value_from_jfunc (new_root_info, jfunc);
2894 if (!target)
2895 return NULL;
2896 cs = ipa_make_edge_direct_to_target (ie, target);
2897
2898 if (cs && !agg_contents)
2899 {
2900 bool ok;
2901 gcc_checking_assert (cs->callee
2902 && (cs != ie
2903 || jfunc->type != IPA_JF_CONST
2904 || !cgraph_node_for_jfunc (jfunc)
2905 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2906 ok = try_decrement_rdesc_refcount (jfunc);
2907 gcc_checking_assert (ok);
2908 }
2909
2910 return cs;
2911 }
2912
2913 /* Return the target to be used in cases of impossible devirtualization. IE
2914 and target (the latter can be NULL) are dumped when dumping is enabled. */
2915
2916 tree
2917 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2918 {
2919 if (dump_file)
2920 {
2921 if (target)
2922 fprintf (dump_file,
2923 "Type inconsistent devirtualization: %s/%i->%s\n",
2924 ie->caller->name (), ie->caller->order,
2925 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2926 else
2927 fprintf (dump_file,
2928 "No devirtualization target in %s/%i\n",
2929 ie->caller->name (), ie->caller->order);
2930 }
2931 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2932 cgraph_get_create_node (new_target);
2933 return new_target;
2934 }
2935
2936 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2937 call based on a formal parameter which is described by jump function JFUNC
2938 and if it can be determined, make it direct and return the direct edge.
2939 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2940 are relative to. */
2941
2942 static struct cgraph_edge *
2943 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2944 struct ipa_jump_func *jfunc,
2945 struct ipa_node_params *new_root_info)
2946 {
2947 tree binfo, target;
2948
2949 if (!flag_devirtualize)
2950 return NULL;
2951
2952 /* First try to do lookup via known virtual table pointer value. */
2953 if (!ie->indirect_info->by_ref)
2954 {
2955 tree vtable;
2956 unsigned HOST_WIDE_INT offset;
2957 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2958 ie->indirect_info->offset,
2959 true);
2960 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2961 {
2962 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2963 vtable, offset);
2964 if (target)
2965 {
2966 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2967 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2968 || !possible_polymorphic_call_target_p
2969 (ie, cgraph_get_node (target)))
2970 target = ipa_impossible_devirt_target (ie, target);
2971 return ipa_make_edge_direct_to_target (ie, target);
2972 }
2973 }
2974 }
2975
2976 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2977
2978 if (!binfo)
2979 return NULL;
2980
2981 if (TREE_CODE (binfo) != TREE_BINFO)
2982 {
2983 ipa_polymorphic_call_context context;
2984 vec <cgraph_node *>targets;
2985 bool final;
2986
2987 if (!get_polymorphic_call_info_from_invariant
2988 (&context, binfo, ie->indirect_info->otr_type,
2989 ie->indirect_info->offset))
2990 return NULL;
2991 targets = possible_polymorphic_call_targets
2992 (ie->indirect_info->otr_type,
2993 ie->indirect_info->otr_token,
2994 context, &final);
2995 if (!final || targets.length () > 1)
2996 return NULL;
2997 if (targets.length () == 1)
2998 target = targets[0]->decl;
2999 else
3000 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3001 }
3002 else
3003 {
3004 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3005 ie->indirect_info->otr_type);
3006 if (binfo)
3007 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3008 binfo);
3009 else
3010 return NULL;
3011 }
3012
3013 if (target)
3014 {
3015 if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
3016 target = ipa_impossible_devirt_target (ie, target);
3017 return ipa_make_edge_direct_to_target (ie, target);
3018 }
3019 else
3020 return NULL;
3021 }
3022
3023 /* Update the param called notes associated with NODE when CS is being inlined,
3024 assuming NODE is (potentially indirectly) inlined into CS->callee.
3025 Moreover, if the callee is discovered to be constant, create a new cgraph
3026 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3027 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3028
3029 static bool
3030 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3031 struct cgraph_node *node,
3032 vec<cgraph_edge_p> *new_edges)
3033 {
3034 struct ipa_edge_args *top;
3035 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3036 struct ipa_node_params *new_root_info;
3037 bool res = false;
3038
3039 ipa_check_create_edge_args ();
3040 top = IPA_EDGE_REF (cs);
3041 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3042 ? cs->caller->global.inlined_to
3043 : cs->caller);
3044
3045 for (ie = node->indirect_calls; ie; ie = next_ie)
3046 {
3047 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3048 struct ipa_jump_func *jfunc;
3049 int param_index;
3050
3051 next_ie = ie->next_callee;
3052
3053 if (ici->param_index == -1)
3054 continue;
3055
3056 /* We must check the range due to calls with a variable number of arguments. */
3057 if (ici->param_index >= ipa_get_cs_argument_count (top))
3058 {
3059 ici->param_index = -1;
3060 continue;
3061 }
3062
3063 param_index = ici->param_index;
3064 jfunc = ipa_get_ith_jump_func (top, param_index);
3065
3066 if (!flag_indirect_inlining)
3067 new_direct_edge = NULL;
3068 else if (ici->polymorphic)
3069 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3070 new_root_info);
3071 else
3072 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3073 new_root_info);
3074 /* If speculation was removed, then we need to do nothing. */
3075 if (new_direct_edge && new_direct_edge != ie)
3076 {
3077 new_direct_edge->indirect_inlining_edge = 1;
3078 top = IPA_EDGE_REF (cs);
3079 res = true;
3080 }
3081 else if (new_direct_edge)
3082 {
3083 new_direct_edge->indirect_inlining_edge = 1;
3084 if (new_direct_edge->call_stmt)
3085 new_direct_edge->call_stmt_cannot_inline_p
3086 = !gimple_check_call_matching_types (
3087 new_direct_edge->call_stmt,
3088 new_direct_edge->callee->decl, false);
3089 if (new_edges)
3090 {
3091 new_edges->safe_push (new_direct_edge);
3092 res = true;
3093 }
3094 top = IPA_EDGE_REF (cs);
3095 }
3096 else if (jfunc->type == IPA_JF_PASS_THROUGH
3097 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3098 {
3099 if ((ici->agg_contents
3100 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3101 || (ici->polymorphic
3102 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3103 ici->param_index = -1;
3104 else
3105 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3106 }
3107 else if (jfunc->type == IPA_JF_ANCESTOR)
3108 {
3109 if ((ici->agg_contents
3110 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3111 || (ici->polymorphic
3112 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3113 ici->param_index = -1;
3114 else
3115 {
3116 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3117 if (ipa_get_jf_ancestor_offset (jfunc))
3118 ici->outer_type = NULL;
3119 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3120 }
3121 }
3122 else
3123 /* Either we can find a destination for this edge now or never. */
3124 ici->param_index = -1;
3125 }
3126
3127 return res;
3128 }
3129
3130 /* Recursively traverse subtree of NODE (including node) made of inlined
3131 cgraph_edges when CS has been inlined and invoke
3132 update_indirect_edges_after_inlining on all nodes and
3133 update_jump_functions_after_inlining on all non-inlined edges that lead out
3134 of this subtree. Newly discovered indirect edges will be added to
3135 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3136 created. */
3137
3138 static bool
3139 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3140 struct cgraph_node *node,
3141 vec<cgraph_edge_p> *new_edges)
3142 {
3143 struct cgraph_edge *e;
3144 bool res;
3145
3146 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3147
3148 for (e = node->callees; e; e = e->next_callee)
3149 if (!e->inline_failed)
3150 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3151 else
3152 update_jump_functions_after_inlining (cs, e);
3153 for (e = node->indirect_calls; e; e = e->next_callee)
3154 update_jump_functions_after_inlining (cs, e);
3155
3156 return res;
3157 }
3158
3159 /* Combine two controlled uses counts as done during inlining. */
3160
3161 static int
3162 combine_controlled_uses_counters (int c, int d)
3163 {
3164 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3165 return IPA_UNDESCRIBED_USE;
3166 else
3167 return c + d - 1;
3168 }
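/* A worked example: combining c == 2 and d == 3 yields 2 + 3 - 1 == 4; the
   minus one accounts for the argument of the call being inlined, which was
   counted on both sides but disappears with the inlining.  An
   IPA_UNDESCRIBED_USE on either side makes the result undescribed too.  */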
3169
3170 /* Propagate the number of controlled users from CS->callee to the new root of
3171 the tree of inlined nodes. */
3172
3173 static void
3174 propagate_controlled_uses (struct cgraph_edge *cs)
3175 {
3176 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3177 struct cgraph_node *new_root = cs->caller->global.inlined_to
3178 ? cs->caller->global.inlined_to : cs->caller;
3179 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3180 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3181 int count, i;
3182
3183 count = MIN (ipa_get_cs_argument_count (args),
3184 ipa_get_param_count (old_root_info));
3185 for (i = 0; i < count; i++)
3186 {
3187 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3188 struct ipa_cst_ref_desc *rdesc;
3189
3190 if (jf->type == IPA_JF_PASS_THROUGH)
3191 {
3192 int src_idx, c, d;
3193 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3194 c = ipa_get_controlled_uses (new_root_info, src_idx);
3195 d = ipa_get_controlled_uses (old_root_info, i);
3196
3197 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3198 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3199 c = combine_controlled_uses_counters (c, d);
3200 ipa_set_controlled_uses (new_root_info, src_idx, c);
3201 if (c == 0 && new_root_info->ipcp_orig_node)
3202 {
3203 struct cgraph_node *n;
3204 struct ipa_ref *ref;
3205 tree t = new_root_info->known_vals[src_idx];
3206
3207 if (t && TREE_CODE (t) == ADDR_EXPR
3208 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3209 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
3210 && (ref = new_root->find_reference (n, NULL, 0)))
3211 {
3212 if (dump_file)
3213 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3214 "reference from %s/%i to %s/%i.\n",
3215 xstrdup (new_root->name ()),
3216 new_root->order,
3217 xstrdup (n->name ()), n->order);
3218 ref->remove_reference ();
3219 }
3220 }
3221 }
3222 else if (jf->type == IPA_JF_CONST
3223 && (rdesc = jfunc_rdesc_usable (jf)))
3224 {
3225 int d = ipa_get_controlled_uses (old_root_info, i);
3226 int c = rdesc->refcount;
3227 rdesc->refcount = combine_controlled_uses_counters (c, d);
3228 if (rdesc->refcount == 0)
3229 {
3230 tree cst = ipa_get_jf_constant (jf);
3231 struct cgraph_node *n;
3232 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3233 && TREE_CODE (TREE_OPERAND (cst, 0))
3234 == FUNCTION_DECL);
3235 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3236 if (n)
3237 {
3238 struct cgraph_node *clone;
3239 bool ok;
3240 ok = remove_described_reference (n, rdesc);
3241 gcc_checking_assert (ok);
3242
3243 clone = cs->caller;
3244 while (clone->global.inlined_to
3245 && clone != rdesc->cs->caller
3246 && IPA_NODE_REF (clone)->ipcp_orig_node)
3247 {
3248 struct ipa_ref *ref;
3249 ref = clone->find_reference (n, NULL, 0);
3250 if (ref)
3251 {
3252 if (dump_file)
3253 fprintf (dump_file, "ipa-prop: Removing "
3254 "cloning-created reference "
3255 "from %s/%i to %s/%i.\n",
3256 xstrdup (clone->name ()),
3257 clone->order,
3258 xstrdup (n->name ()),
3259 n->order);
3260 ref->remove_reference ();
3261 }
3262 clone = clone->callers->caller;
3263 }
3264 }
3265 }
3266 }
3267 }
3268
3269 for (i = ipa_get_param_count (old_root_info);
3270 i < ipa_get_cs_argument_count (args);
3271 i++)
3272 {
3273 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3274
3275 if (jf->type == IPA_JF_CONST)
3276 {
3277 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3278 if (rdesc)
3279 rdesc->refcount = IPA_UNDESCRIBED_USE;
3280 }
3281 else if (jf->type == IPA_JF_PASS_THROUGH)
3282 ipa_set_controlled_uses (new_root_info,
3283 jf->value.pass_through.formal_id,
3284 IPA_UNDESCRIBED_USE);
3285 }
3286 }
3287
3288 /* Update jump functions and call note functions on inlining the call site CS.
3289 CS is expected to lead to a node already cloned by
3290 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3291 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3292 created. */
3293
3294 bool
3295 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3296 vec<cgraph_edge_p> *new_edges)
3297 {
3298 bool changed;
3299 /* Do nothing if the preparation phase has not been carried out yet
3300 (i.e. during early inlining). */
3301 if (!ipa_node_params_vector.exists ())
3302 return false;
3303 gcc_assert (ipa_edge_args_vector);
3304
3305 propagate_controlled_uses (cs);
3306 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3307
3308 return changed;
3309 }
3310
3311 /* Frees all dynamically allocated structures that the argument info points
3312 to. */
3313
3314 void
3315 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3316 {
3317 vec_free (args->jump_functions);
3318 memset (args, 0, sizeof (*args));
3319 }
3320
3321 /* Free all ipa_edge structures. */
3322
3323 void
3324 ipa_free_all_edge_args (void)
3325 {
3326 int i;
3327 struct ipa_edge_args *args;
3328
3329 if (!ipa_edge_args_vector)
3330 return;
3331
3332 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3333 ipa_free_edge_args_substructures (args);
3334
3335 vec_free (ipa_edge_args_vector);
3336 }
3337
3338 /* Frees all dynamically allocated structures that the param info points
3339 to. */
3340
3341 void
3342 ipa_free_node_params_substructures (struct ipa_node_params *info)
3343 {
3344 info->descriptors.release ();
3345 free (info->lattices);
3346 /* Lattice values and their sources are deallocated with their allocation
3347 pool. */
3348 info->known_vals.release ();
3349 memset (info, 0, sizeof (*info));
3350 }
3351
3352 /* Free all ipa_node_params structures. */
3353
3354 void
3355 ipa_free_all_node_params (void)
3356 {
3357 int i;
3358 struct ipa_node_params *info;
3359
3360 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3361 ipa_free_node_params_substructures (info);
3362
3363 ipa_node_params_vector.release ();
3364 }
3365
3366 /* Set the aggregate replacements of NODE to be AGGVALS. */
3367
3368 void
3369 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3370 struct ipa_agg_replacement_value *aggvals)
3371 {
3372 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3373 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3374
3375 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3376 }
3377
3378 /* Hook that is called by cgraph.c when an edge is removed. */
3379
3380 static void
3381 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3382 {
3383 struct ipa_edge_args *args;
3384
3385 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3386 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3387 return;
3388
3389 args = IPA_EDGE_REF (cs);
3390 if (args->jump_functions)
3391 {
3392 struct ipa_jump_func *jf;
3393 int i;
3394 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3395 {
3396 struct ipa_cst_ref_desc *rdesc;
3397 try_decrement_rdesc_refcount (jf);
3398 if (jf->type == IPA_JF_CONST
3399 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3400 && rdesc->cs == cs)
3401 rdesc->cs = NULL;
3402 }
3403 }
3404
3405 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3406 }
3407
3408 /* Hook that is called by cgraph.c when a node is removed. */
3409
3410 static void
3411 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3412 {
3413 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3414 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3415 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3416 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3417 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3418 }
3419
3420 /* Hook that is called by cgraph.c when an edge is duplicated. */
3421
3422 static void
3423 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3424 __attribute__((unused)) void *data)
3425 {
3426 struct ipa_edge_args *old_args, *new_args;
3427 unsigned int i;
3428
3429 ipa_check_create_edge_args ();
3430
3431 old_args = IPA_EDGE_REF (src);
3432 new_args = IPA_EDGE_REF (dst);
3433
3434 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3435
3436 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3437 {
3438 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3439 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3440
3441 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3442
3443 if (src_jf->type == IPA_JF_CONST)
3444 {
3445 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3446
3447 if (!src_rdesc)
3448 dst_jf->value.constant.rdesc = NULL;
3449 else if (src->caller == dst->caller)
3450 {
3451 struct ipa_ref *ref;
3452 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3453 gcc_checking_assert (n);
3454 ref = src->caller->find_reference (n, src->call_stmt,
3455 src->lto_stmt_uid);
3456 gcc_checking_assert (ref);
3457 dst->caller->clone_reference (ref, ref->stmt);
3458
3459 gcc_checking_assert (ipa_refdesc_pool);
3460 struct ipa_cst_ref_desc *dst_rdesc
3461 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3462 dst_rdesc->cs = dst;
3463 dst_rdesc->refcount = src_rdesc->refcount;
3464 dst_rdesc->next_duplicate = NULL;
3465 dst_jf->value.constant.rdesc = dst_rdesc;
3466 }
3467 else if (src_rdesc->cs == src)
3468 {
3469 struct ipa_cst_ref_desc *dst_rdesc;
3470 gcc_checking_assert (ipa_refdesc_pool);
3471 dst_rdesc
3472 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3473 dst_rdesc->cs = dst;
3474 dst_rdesc->refcount = src_rdesc->refcount;
3475 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3476 src_rdesc->next_duplicate = dst_rdesc;
3477 dst_jf->value.constant.rdesc = dst_rdesc;
3478 }
3479 else
3480 {
3481 struct ipa_cst_ref_desc *dst_rdesc;
3482 /* This can happen during inlining, when a JFUNC can refer to a
3483 reference taken in a function up in the tree of inline clones.
3484 We need to find the duplicate that refers to our tree of
3485 inline clones. */
3486
3487 gcc_assert (dst->caller->global.inlined_to);
3488 for (dst_rdesc = src_rdesc->next_duplicate;
3489 dst_rdesc;
3490 dst_rdesc = dst_rdesc->next_duplicate)
3491 {
3492 struct cgraph_node *top;
3493 top = dst_rdesc->cs->caller->global.inlined_to
3494 ? dst_rdesc->cs->caller->global.inlined_to
3495 : dst_rdesc->cs->caller;
3496 if (dst->caller->global.inlined_to == top)
3497 break;
3498 }
3499 gcc_assert (dst_rdesc);
3500 dst_jf->value.constant.rdesc = dst_rdesc;
3501 }
3502 }
3503 }
3504 }
3505
3506 /* Hook that is called by cgraph.c when a node is duplicated. */
3507
3508 static void
3509 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3510 ATTRIBUTE_UNUSED void *data)
3511 {
3512 struct ipa_node_params *old_info, *new_info;
3513 struct ipa_agg_replacement_value *old_av, *new_av;
3514
3515 ipa_check_create_node_params ();
3516 old_info = IPA_NODE_REF (src);
3517 new_info = IPA_NODE_REF (dst);
3518
3519 new_info->descriptors = old_info->descriptors.copy ();
3520 new_info->lattices = NULL;
3521 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3522
3523 new_info->analysis_done = old_info->analysis_done;
3524 new_info->node_enqueued = old_info->node_enqueued;
3525
3526 old_av = ipa_get_agg_replacements_for_node (src);
3527 if (!old_av)
3528 return;
3529
3530 new_av = NULL;
3531 while (old_av)
3532 {
3533 struct ipa_agg_replacement_value *v;
3534
3535 v = ggc_alloc<ipa_agg_replacement_value> ();
3536 memcpy (v, old_av, sizeof (*v));
3537 v->next = new_av;
3538 new_av = v;
3539 old_av = old_av->next;
3540 }
3541 ipa_set_node_agg_value_chain (dst, new_av);
3542 }
3543
3544
3545 /* Analyze a function newly added to the callgraph. */
3546
3547 static void
3548 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3549 {
3550 if (cgraph_function_with_gimple_body_p (node))
3551 ipa_analyze_node (node);
3552 }
3553
3554 /* Register our cgraph hooks if they are not already there. */
3555
3556 void
3557 ipa_register_cgraph_hooks (void)
3558 {
3559 if (!edge_removal_hook_holder)
3560 edge_removal_hook_holder =
3561 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3562 if (!node_removal_hook_holder)
3563 node_removal_hook_holder =
3564 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3565 if (!edge_duplication_hook_holder)
3566 edge_duplication_hook_holder =
3567 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3568 if (!node_duplication_hook_holder)
3569 node_duplication_hook_holder =
3570 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3571 function_insertion_hook_holder =
3572 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3573 }
3574
/* Unregister our cgraph hooks.  */
3576
3577 static void
3578 ipa_unregister_cgraph_hooks (void)
3579 {
3580 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3581 edge_removal_hook_holder = NULL;
3582 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3583 node_removal_hook_holder = NULL;
3584 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3585 edge_duplication_hook_holder = NULL;
3586 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3587 node_duplication_hook_holder = NULL;
3588 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3589 function_insertion_hook_holder = NULL;
3590 }
3591
3592 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3593 longer needed after ipa-cp. */
3594
3595 void
3596 ipa_free_all_structures_after_ipa_cp (void)
3597 {
3598 if (!optimize)
3599 {
3600 ipa_free_all_edge_args ();
3601 ipa_free_all_node_params ();
3602 free_alloc_pool (ipcp_sources_pool);
3603 free_alloc_pool (ipcp_values_pool);
3604 free_alloc_pool (ipcp_agg_lattice_pool);
3605 ipa_unregister_cgraph_hooks ();
3606 if (ipa_refdesc_pool)
3607 free_alloc_pool (ipa_refdesc_pool);
3608 }
3609 }
3610
3611 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3612 longer needed after indirect inlining. */
3613
3614 void
3615 ipa_free_all_structures_after_iinln (void)
3616 {
3617 ipa_free_all_edge_args ();
3618 ipa_free_all_node_params ();
3619 ipa_unregister_cgraph_hooks ();
3620 if (ipcp_sources_pool)
3621 free_alloc_pool (ipcp_sources_pool);
3622 if (ipcp_values_pool)
3623 free_alloc_pool (ipcp_values_pool);
3624 if (ipcp_agg_lattice_pool)
3625 free_alloc_pool (ipcp_agg_lattice_pool);
3626 if (ipa_refdesc_pool)
3627 free_alloc_pool (ipa_refdesc_pool);
3628 }
3629
/* Print the parameter descriptors of NODE to F.  */
3632
3633 void
3634 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3635 {
3636 int i, count;
3637 struct ipa_node_params *info;
3638
3639 if (!node->definition)
3640 return;
3641 info = IPA_NODE_REF (node);
3642 fprintf (f, " function %s/%i parameter descriptors:\n",
3643 node->name (), node->order);
3644 count = ipa_get_param_count (info);
3645 for (i = 0; i < count; i++)
3646 {
3647 int c;
3648
3649 fprintf (f, " ");
3650 ipa_dump_param (f, info, i);
3651 if (ipa_is_param_used (info, i))
3652 fprintf (f, " used");
3653 c = ipa_get_controlled_uses (info, i);
3654 if (c == IPA_UNDESCRIBED_USE)
3655 fprintf (f, " undescribed_use");
3656 else
3657 fprintf (f, " controlled_uses=%i", c);
3658 fprintf (f, "\n");
3659 }
3660 }
3661
/* Print parameter descriptors of all functions in the callgraph to F.  */
3664
3665 void
3666 ipa_print_all_params (FILE * f)
3667 {
3668 struct cgraph_node *node;
3669
3670 fprintf (f, "\nFunction parameters:\n");
3671 FOR_EACH_FUNCTION (node)
3672 ipa_print_node_params (f, node);
3673 }
3674
/* Return a heap-allocated vector containing the formal parameters of
   FNDECL.  */
3676
3677 vec<tree>
3678 ipa_get_vector_of_formal_parms (tree fndecl)
3679 {
3680 vec<tree> args;
3681 int count;
3682 tree parm;
3683
3684 gcc_assert (!flag_wpa);
3685 count = count_formal_params (fndecl);
3686 args.create (count);
3687 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3688 args.quick_push (parm);
3689
3690 return args;
3691 }
3692
/* Return a heap-allocated vector containing the types of formal parameters
   of function type FNTYPE.  */
3695
3696 vec<tree>
3697 ipa_get_vector_of_formal_parm_types (tree fntype)
3698 {
3699 vec<tree> types;
3700 int count = 0;
3701 tree t;
3702
3703 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3704 count++;
3705
3706 types.create (count);
3707 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3708 types.quick_push (TREE_VALUE (t));
3709
3710 return types;
3711 }
3712
/* Modify the function declaration FNDECL and its type according to the plan
   in ADJUSTMENTS.  It also sets base fields of individual adjustment
   structures to reflect the actual parameters being modified, which are
   determined by the base_index field.  */
3717
3718 void
3719 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3720 {
3721 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3722 tree orig_type = TREE_TYPE (fndecl);
3723 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3724
  /* The following test is an ugly hack: some functions simply don't have
     any arguments in their type.  This is probably a bug, but we have to
     cope with it.  */
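  /* (For example, an unprototyped K&R-style definition such as
     "int f (a) int a; { return a; }" ends up with a FUNCTION_TYPE whose
     TYPE_ARG_TYPES is NULL_TREE.)  */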
3727 bool care_for_types = (old_arg_types != NULL_TREE);
3728 bool last_parm_void;
3729 vec<tree> otypes;
3730 if (care_for_types)
3731 {
3732 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3733 == void_type_node);
3734 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3735 if (last_parm_void)
3736 gcc_assert (oparms.length () + 1 == otypes.length ());
3737 else
3738 gcc_assert (oparms.length () == otypes.length ());
3739 }
3740 else
3741 {
3742 last_parm_void = false;
3743 otypes.create (0);
3744 }
3745
3746 int len = adjustments.length ();
3747 tree *link = &DECL_ARGUMENTS (fndecl);
3748 tree new_arg_types = NULL;
3749 for (int i = 0; i < len; i++)
3750 {
3751 struct ipa_parm_adjustment *adj;
3752 gcc_assert (link);
3753
3754 adj = &adjustments[i];
3755 tree parm;
3756 if (adj->op == IPA_PARM_OP_NEW)
3757 parm = NULL;
3758 else
3759 parm = oparms[adj->base_index];
3760 adj->base = parm;
3761
3762 if (adj->op == IPA_PARM_OP_COPY)
3763 {
3764 if (care_for_types)
3765 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3766 new_arg_types);
3767 *link = parm;
3768 link = &DECL_CHAIN (parm);
3769 }
3770 else if (adj->op != IPA_PARM_OP_REMOVE)
3771 {
3772 tree new_parm;
3773 tree ptype;
3774
3775 if (adj->by_ref)
3776 ptype = build_pointer_type (adj->type);
3777 else
3778 {
3779 ptype = adj->type;
3780 if (is_gimple_reg_type (ptype))
3781 {
3782 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3783 if (TYPE_ALIGN (ptype) < malign)
3784 ptype = build_aligned_type (ptype, malign);
3785 }
3786 }
3787
3788 if (care_for_types)
3789 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3790
3791 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3792 ptype);
3793 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3794 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3795 DECL_ARTIFICIAL (new_parm) = 1;
3796 DECL_ARG_TYPE (new_parm) = ptype;
3797 DECL_CONTEXT (new_parm) = fndecl;
3798 TREE_USED (new_parm) = 1;
3799 DECL_IGNORED_P (new_parm) = 1;
3800 layout_decl (new_parm, 0);
3801
3802 if (adj->op == IPA_PARM_OP_NEW)
3803 adj->base = NULL;
3804 else
3805 adj->base = parm;
3806 adj->new_decl = new_parm;
3807
3808 *link = new_parm;
3809 link = &DECL_CHAIN (new_parm);
3810 }
3811 }
3812
3813 *link = NULL_TREE;
3814
3815 tree new_reversed = NULL;
3816 if (care_for_types)
3817 {
3818 new_reversed = nreverse (new_arg_types);
3819 if (last_parm_void)
3820 {
3821 if (new_reversed)
3822 TREE_CHAIN (new_arg_types) = void_list_node;
3823 else
3824 new_reversed = void_list_node;
3825 }
3826 }
3827
  /* Use copy_node to preserve as much as possible from the original type
     (debug info, attribute lists etc.).  The exception is METHOD_TYPEs,
     which must have a THIS argument; when we are asked to remove it, we
     need to build a new FUNCTION_TYPE instead.  */
3833 tree new_type = NULL;
3834 if (TREE_CODE (orig_type) != METHOD_TYPE
3835 || (adjustments[0].op == IPA_PARM_OP_COPY
3836 && adjustments[0].base_index == 0))
3837 {
3838 new_type = build_distinct_type_copy (orig_type);
3839 TYPE_ARG_TYPES (new_type) = new_reversed;
3840 }
3841 else
3842 {
3843 new_type
3844 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3845 new_reversed));
3846 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3847 DECL_VINDEX (fndecl) = NULL_TREE;
3848 }
3849
3850 /* When signature changes, we need to clear builtin info. */
3851 if (DECL_BUILT_IN (fndecl))
3852 {
3853 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3854 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3855 }
3856
3857 /* This is a new type, not a copy of an old type. Need to reassociate
3858 variants. We can handle everything except the main variant lazily. */
3859 tree t = TYPE_MAIN_VARIANT (orig_type);
3860 if (orig_type != t)
3861 {
3862 TYPE_MAIN_VARIANT (new_type) = t;
3863 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3864 TYPE_NEXT_VARIANT (t) = new_type;
3865 }
3866 else
3867 {
3868 TYPE_MAIN_VARIANT (new_type) = new_type;
3869 TYPE_NEXT_VARIANT (new_type) = NULL;
3870 }
3871
3872 TREE_TYPE (fndecl) = new_type;
3873 DECL_VIRTUAL_P (fndecl) = 0;
3874 DECL_LANG_SPECIFIC (fndecl) = NULL;
3875 otypes.release ();
3876 oparms.release ();
3877 }
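
/* A minimal usage sketch (a hypothetical caller, not code from this file):
   drop the second parameter of FNDECL and keep the others unchanged.

     ipa_parm_adjustment_vec adj;
     int nparms = count_formal_params (fndecl);
     adj.create (nparms);
     for (int i = 0; i < nparms; i++)
       {
	 struct ipa_parm_adjustment a;
	 memset (&a, 0, sizeof (a));
	 a.base_index = i;
	 a.op = (i == 1) ? IPA_PARM_OP_REMOVE : IPA_PARM_OP_COPY;
	 adj.quick_push (a);
       }
     ipa_modify_formal_parameters (fndecl, adj);
     adj.release ();  */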
3878
/* Modify the actual arguments of the function call STMT as indicated in
   ADJUSTMENTS.  If this is a directly recursive call, CS must be NULL;
   otherwise it must contain the corresponding call graph edge.  */
3882
3883 void
3884 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3885 ipa_parm_adjustment_vec adjustments)
3886 {
3887 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3888 vec<tree> vargs;
3889 vec<tree, va_gc> **debug_args = NULL;
3890 gimple new_stmt;
3891 gimple_stmt_iterator gsi, prev_gsi;
3892 tree callee_decl;
3893 int i, len;
3894
3895 len = adjustments.length ();
3896 vargs.create (len);
3897 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3898 current_node->remove_stmt_references (stmt);
3899
3900 gsi = gsi_for_stmt (stmt);
3901 prev_gsi = gsi;
3902 gsi_prev (&prev_gsi);
3903 for (i = 0; i < len; i++)
3904 {
3905 struct ipa_parm_adjustment *adj;
3906
3907 adj = &adjustments[i];
3908
3909 if (adj->op == IPA_PARM_OP_COPY)
3910 {
3911 tree arg = gimple_call_arg (stmt, adj->base_index);
3912
3913 vargs.quick_push (arg);
3914 }
3915 else if (adj->op != IPA_PARM_OP_REMOVE)
3916 {
3917 tree expr, base, off;
3918 location_t loc;
3919 unsigned int deref_align = 0;
3920 bool deref_base = false;
3921
          /* We create a new parameter out of the value of the old one; we
             can do the following kinds of transformations:

             - A scalar passed by reference is converted to a scalar passed
               by value.  (adj->by_ref is false and the type of the original
               actual argument is a pointer to a scalar.)

             - A part of an aggregate is passed instead of the whole
               aggregate.  The part can be passed either by value or by
               reference; this is determined by the value of adj->by_ref.
               Moreover, the code below handles both situations when the
               original aggregate is passed by value (its type is not a
               pointer) and when it is passed by reference (it is a pointer
               to an aggregate).

             When the new argument is passed by reference (adj->by_ref is
             true) it must be a part of an aggregate and therefore we form
             it by simply taking the address of a reference inside the
             original aggregate.  */
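          /* An illustrative sketch (hypothetical signatures, made up for
             this comment):

               foo (int *p)       ->  foo.clone (int p_val)
                 (scalar passed by reference turned into one passed by
                 value),
               foo (struct s x)   ->  foo.clone (int x_member)
                 (part of an aggregate passed by value),
               foo (struct s *x)  ->  foo.clone (int *x_member)
                 (part of an aggregate passed by reference).  */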
3940
3941 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3942 base = gimple_call_arg (stmt, adj->base_index);
3943 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3944 : EXPR_LOCATION (base);
3945
3946 if (TREE_CODE (base) != ADDR_EXPR
3947 && POINTER_TYPE_P (TREE_TYPE (base)))
3948 off = build_int_cst (adj->alias_ptr_type,
3949 adj->offset / BITS_PER_UNIT);
3950 else
3951 {
3952 HOST_WIDE_INT base_offset;
3953 tree prev_base;
3954 bool addrof;
3955
3956 if (TREE_CODE (base) == ADDR_EXPR)
3957 {
3958 base = TREE_OPERAND (base, 0);
3959 addrof = true;
3960 }
3961 else
3962 addrof = false;
3963 prev_base = base;
3964 base = get_addr_base_and_unit_offset (base, &base_offset);
3965 /* Aggregate arguments can have non-invariant addresses. */
3966 if (!base)
3967 {
3968 base = build_fold_addr_expr (prev_base);
3969 off = build_int_cst (adj->alias_ptr_type,
3970 adj->offset / BITS_PER_UNIT);
3971 }
3972 else if (TREE_CODE (base) == MEM_REF)
3973 {
3974 if (!addrof)
3975 {
3976 deref_base = true;
3977 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3978 }
3979 off = build_int_cst (adj->alias_ptr_type,
3980 base_offset
3981 + adj->offset / BITS_PER_UNIT);
3982 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3983 off);
3984 base = TREE_OPERAND (base, 0);
3985 }
3986 else
3987 {
3988 off = build_int_cst (adj->alias_ptr_type,
3989 base_offset
3990 + adj->offset / BITS_PER_UNIT);
3991 base = build_fold_addr_expr (base);
3992 }
3993 }
3994
3995 if (!adj->by_ref)
3996 {
3997 tree type = adj->type;
3998 unsigned int align;
3999 unsigned HOST_WIDE_INT misalign;
4000
4001 if (deref_base)
4002 {
4003 align = deref_align;
4004 misalign = 0;
4005 }
4006 else
4007 {
4008 get_pointer_alignment_1 (base, &align, &misalign);
4009 if (TYPE_ALIGN (type) > align)
4010 align = TYPE_ALIGN (type);
4011 }
4012 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4013 * BITS_PER_UNIT);
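              /* Keep only the bits of the misalignment that fall within the
                 known alignment; if any remain, the guaranteed alignment
                 drops to the lowest set bit of the misalignment
                 (MISALIGN & -MISALIGN).  */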
4014 misalign = misalign & (align - 1);
4015 if (misalign != 0)
4016 align = (misalign & -misalign);
4017 if (align < TYPE_ALIGN (type))
4018 type = build_aligned_type (type, align);
4019 base = force_gimple_operand_gsi (&gsi, base,
4020 true, NULL, true, GSI_SAME_STMT);
4021 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
              /* If expr is not a valid gimple call argument, emit
                 a load into a temporary.  */
4024 if (is_gimple_reg_type (TREE_TYPE (expr)))
4025 {
4026 gimple tem = gimple_build_assign (NULL_TREE, expr);
4027 if (gimple_in_ssa_p (cfun))
4028 {
4029 gimple_set_vuse (tem, gimple_vuse (stmt));
4030 expr = make_ssa_name (TREE_TYPE (expr), tem);
4031 }
4032 else
4033 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4034 gimple_assign_set_lhs (tem, expr);
4035 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4036 }
4037 }
4038 else
4039 {
4040 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4041 expr = build_fold_addr_expr (expr);
4042 expr = force_gimple_operand_gsi (&gsi, expr,
4043 true, NULL, true, GSI_SAME_STMT);
4044 }
4045 vargs.quick_push (expr);
4046 }
4047 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4048 {
4049 unsigned int ix;
4050 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4051 gimple def_temp;
4052
4053 arg = gimple_call_arg (stmt, adj->base_index);
4054 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4055 {
4056 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4057 continue;
4058 arg = fold_convert_loc (gimple_location (stmt),
4059 TREE_TYPE (origin), arg);
4060 }
4061 if (debug_args == NULL)
4062 debug_args = decl_debug_args_insert (callee_decl);
4063 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4064 if (ddecl == origin)
4065 {
4066 ddecl = (**debug_args)[ix + 1];
4067 break;
4068 }
4069 if (ddecl == NULL)
4070 {
4071 ddecl = make_node (DEBUG_EXPR_DECL);
4072 DECL_ARTIFICIAL (ddecl) = 1;
4073 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4074 DECL_MODE (ddecl) = DECL_MODE (origin);
4075
4076 vec_safe_push (*debug_args, origin);
4077 vec_safe_push (*debug_args, ddecl);
4078 }
4079 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4080 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4081 }
4082 }
4083
4084 if (dump_file && (dump_flags & TDF_DETAILS))
4085 {
4086 fprintf (dump_file, "replacing stmt:");
4087 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4088 }
4089
4090 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4091 vargs.release ();
4092 if (gimple_call_lhs (stmt))
4093 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4094
4095 gimple_set_block (new_stmt, gimple_block (stmt));
4096 if (gimple_has_location (stmt))
4097 gimple_set_location (new_stmt, gimple_location (stmt));
4098 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4099 gimple_call_copy_flags (new_stmt, stmt);
4100 if (gimple_in_ssa_p (cfun))
4101 {
4102 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4103 if (gimple_vdef (stmt))
4104 {
4105 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4106 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4107 }
4108 }
4109
4110 if (dump_file && (dump_flags & TDF_DETAILS))
4111 {
4112 fprintf (dump_file, "with stmt:");
4113 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4114 fprintf (dump_file, "\n");
4115 }
4116 gsi_replace (&gsi, new_stmt, true);
4117 if (cs)
4118 cgraph_set_call_stmt (cs, new_stmt);
4119 do
4120 {
4121 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
4122 gsi_prev (&gsi);
4123 }
4124 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4125 }
4126
/* If the expression *EXPR should be replaced by a reduction of a parameter,
   do so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */
4133
4134 bool
4135 ipa_modify_expr (tree *expr, bool convert,
4136 ipa_parm_adjustment_vec adjustments)
4137 {
4138 struct ipa_parm_adjustment *cand
4139 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4140 if (!cand)
4141 return false;
4142
4143 tree src;
4144 if (cand->by_ref)
4145 src = build_simple_mem_ref (cand->new_decl);
4146 else
4147 src = cand->new_decl;
4148
4149 if (dump_file && (dump_flags & TDF_DETAILS))
4150 {
4151 fprintf (dump_file, "About to replace expr ");
4152 print_generic_expr (dump_file, *expr, 0);
4153 fprintf (dump_file, " with ");
4154 print_generic_expr (dump_file, src, 0);
4155 fprintf (dump_file, "\n");
4156 }
4157
4158 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4159 {
4160 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4161 *expr = vce;
4162 }
4163 else
4164 *expr = src;
4165 return true;
4166 }
4167
/* If T is an SSA_NAME, return NULL if it is not a default def, or its base
   variable if it is.  If IGNORE_DEFAULT_DEF is true, the base variable is
   always returned, regardless of whether it is a default def.  Return T if
   it is not an SSA_NAME.  */
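/* (For instance, in "int f (int a)" the default definition a_1(D) is the
   SSA_NAME whose SSA_NAME_VAR is the PARM_DECL of "a"; the function below
   maps such a name back to that PARM_DECL.)  */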
4172
4173 static tree
4174 get_ssa_base_param (tree t, bool ignore_default_def)
4175 {
4176 if (TREE_CODE (t) == SSA_NAME)
4177 {
4178 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4179 return SSA_NAME_VAR (t);
4180 else
4181 return NULL_TREE;
4182 }
4183 return t;
4184 }
4185
/* Given an expression, return the adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry is
   found, return NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a default
   def, otherwise bail out on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the expression
   provided is a component reference.  ADJUSTMENTS is the adjustments
   vector.  */
4196
4197 ipa_parm_adjustment *
4198 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4199 ipa_parm_adjustment_vec adjustments,
4200 bool ignore_default_def)
4201 {
4202 if (TREE_CODE (**expr) == BIT_FIELD_REF
4203 || TREE_CODE (**expr) == IMAGPART_EXPR
4204 || TREE_CODE (**expr) == REALPART_EXPR)
4205 {
4206 *expr = &TREE_OPERAND (**expr, 0);
4207 if (convert)
4208 *convert = true;
4209 }
4210
4211 HOST_WIDE_INT offset, size, max_size;
4212 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4213 if (!base || size == -1 || max_size == -1)
4214 return NULL;
4215
4216 if (TREE_CODE (base) == MEM_REF)
4217 {
4218 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4219 base = TREE_OPERAND (base, 0);
4220 }
4221
4222 base = get_ssa_base_param (base, ignore_default_def);
4223 if (!base || TREE_CODE (base) != PARM_DECL)
4224 return NULL;
4225
4226 struct ipa_parm_adjustment *cand = NULL;
4227 unsigned int len = adjustments.length ();
4228 for (unsigned i = 0; i < len; i++)
4229 {
4230 struct ipa_parm_adjustment *adj = &adjustments[i];
4231
4232 if (adj->base == base
4233 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4234 {
4235 cand = adj;
4236 break;
4237 }
4238 }
4239
4240 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4241 return NULL;
4242 return cand;
4243 }
4244
4245 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4246
4247 static bool
4248 index_in_adjustments_multiple_times_p (int base_index,
4249 ipa_parm_adjustment_vec adjustments)
4250 {
4251 int i, len = adjustments.length ();
4252 bool one = false;
4253
4254 for (i = 0; i < len; i++)
4255 {
4256 struct ipa_parm_adjustment *adj;
4257 adj = &adjustments[i];
4258
4259 if (adj->base_index == base_index)
4260 {
4261 if (one)
4262 return true;
4263 else
4264 one = true;
4265 }
4266 }
4267 return false;
4268 }
4269
4270
4271 /* Return adjustments that should have the same effect on function parameters
4272 and call arguments as if they were first changed according to adjustments in
4273 INNER and then by adjustments in OUTER. */
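/* (A made-up illustration: if INNER removes parameter 1 of (a, b, c),
   yielding (a, c), and OUTER then removes parameter 0 of that intermediate
   list, the combined vector describes going from (a, b, c) directly
   to (c).)  */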
4274
4275 ipa_parm_adjustment_vec
4276 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4277 ipa_parm_adjustment_vec outer)
4278 {
4279 int i, outlen = outer.length ();
4280 int inlen = inner.length ();
4281 int removals = 0;
4282 ipa_parm_adjustment_vec adjustments, tmp;
4283
4284 tmp.create (inlen);
4285 for (i = 0; i < inlen; i++)
4286 {
4287 struct ipa_parm_adjustment *n;
4288 n = &inner[i];
4289
4290 if (n->op == IPA_PARM_OP_REMOVE)
4291 removals++;
4292 else
4293 {
          /* FIXME: Handling of new arguments is not implemented yet.  */
4295 gcc_assert (n->op != IPA_PARM_OP_NEW);
4296 tmp.quick_push (*n);
4297 }
4298 }
4299
4300 adjustments.create (outlen + removals);
4301 for (i = 0; i < outlen; i++)
4302 {
4303 struct ipa_parm_adjustment r;
4304 struct ipa_parm_adjustment *out = &outer[i];
4305 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4306
4307 memset (&r, 0, sizeof (r));
4308 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4309 if (out->op == IPA_PARM_OP_REMOVE)
4310 {
4311 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4312 {
4313 r.op = IPA_PARM_OP_REMOVE;
4314 adjustments.quick_push (r);
4315 }
4316 continue;
4317 }
4318 else
4319 {
          /* FIXME: Handling of new arguments is not implemented yet.  */
4321 gcc_assert (out->op != IPA_PARM_OP_NEW);
4322 }
4323
4324 r.base_index = in->base_index;
4325 r.type = out->type;
4326
4327 /* FIXME: Create nonlocal value too. */
4328
4329 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4330 r.op = IPA_PARM_OP_COPY;
4331 else if (in->op == IPA_PARM_OP_COPY)
4332 r.offset = out->offset;
4333 else if (out->op == IPA_PARM_OP_COPY)
4334 r.offset = in->offset;
4335 else
4336 r.offset = in->offset + out->offset;
4337 adjustments.quick_push (r);
4338 }
4339
4340 for (i = 0; i < inlen; i++)
4341 {
4342 struct ipa_parm_adjustment *n = &inner[i];
4343
4344 if (n->op == IPA_PARM_OP_REMOVE)
4345 adjustments.quick_push (*n);
4346 }
4347
4348 tmp.release ();
4349 return adjustments;
4350 }
4351
/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
   way, assuming they are meant to be applied to FNDECL.  */
4354
4355 void
4356 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4357 tree fndecl)
4358 {
4359 int i, len = adjustments.length ();
4360 bool first = true;
4361 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4362
4363 fprintf (file, "IPA param adjustments: ");
4364 for (i = 0; i < len; i++)
4365 {
4366 struct ipa_parm_adjustment *adj;
4367 adj = &adjustments[i];
4368
4369 if (!first)
4370 fprintf (file, " ");
4371 else
4372 first = false;
4373
4374 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4375 print_generic_expr (file, parms[adj->base_index], 0);
4376 if (adj->base)
4377 {
4378 fprintf (file, ", base: ");
4379 print_generic_expr (file, adj->base, 0);
4380 }
4381 if (adj->new_decl)
4382 {
4383 fprintf (file, ", new_decl: ");
4384 print_generic_expr (file, adj->new_decl, 0);
4385 }
4386 if (adj->new_ssa_base)
4387 {
4388 fprintf (file, ", new_ssa_base: ");
4389 print_generic_expr (file, adj->new_ssa_base, 0);
4390 }
4391
4392 if (adj->op == IPA_PARM_OP_COPY)
4393 fprintf (file, ", copy_param");
4394 else if (adj->op == IPA_PARM_OP_REMOVE)
4395 fprintf (file, ", remove_param");
4396 else
4397 fprintf (file, ", offset %li", (long) adj->offset);
4398 if (adj->by_ref)
4399 fprintf (file, ", by_ref");
4400 print_node_brief (file, ", type: ", adj->type, 0);
4401 fprintf (file, "\n");
4402 }
4403 parms.release ();
4404 }
4405
/* Dump the linked list of aggregate replacement values AV to F.  */
4407
4408 void
4409 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4410 {
4411 bool comma = false;
4412 fprintf (f, " Aggregate replacements:");
4413 for (; av; av = av->next)
4414 {
4415 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4416 av->index, av->offset);
4417 print_generic_expr (f, av->value, 0);
4418 comma = true;
4419 }
4420 fprintf (f, "\n");
4421 }
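
/* (With the format above, a dumped chain might look like
   " Aggregate replacements: 0[32]=4, 1[0]=&some_var", where the made-up
   entry 0[32]=4 means: parameter 0, bit offset 32, replacement value 4.)  */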
4422
4423 /* Stream out jump function JUMP_FUNC to OB. */
4424
4425 static void
4426 ipa_write_jump_function (struct output_block *ob,
4427 struct ipa_jump_func *jump_func)
4428 {
4429 struct ipa_agg_jf_item *item;
4430 struct bitpack_d bp;
4431 int i, count;
4432
4433 streamer_write_uhwi (ob, jump_func->type);
4434 switch (jump_func->type)
4435 {
4436 case IPA_JF_UNKNOWN:
4437 break;
4438 case IPA_JF_KNOWN_TYPE:
4439 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4440 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4441 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4442 break;
4443 case IPA_JF_CONST:
4444 gcc_assert (
4445 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4446 stream_write_tree (ob, jump_func->value.constant.value, true);
4447 break;
4448 case IPA_JF_PASS_THROUGH:
4449 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4450 if (jump_func->value.pass_through.operation == NOP_EXPR)
4451 {
4452 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4453 bp = bitpack_create (ob->main_stream);
4454 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4455 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4456 streamer_write_bitpack (&bp);
4457 }
4458 else
4459 {
4460 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4461 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4462 }
4463 break;
4464 case IPA_JF_ANCESTOR:
4465 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4466 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4467 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4468 bp = bitpack_create (ob->main_stream);
4469 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4470 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4471 streamer_write_bitpack (&bp);
4472 break;
4473 }
4474
4475 count = vec_safe_length (jump_func->agg.items);
4476 streamer_write_uhwi (ob, count);
4477 if (count)
4478 {
4479 bp = bitpack_create (ob->main_stream);
4480 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4481 streamer_write_bitpack (&bp);
4482 }
4483
4484 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4485 {
4486 streamer_write_uhwi (ob, item->offset);
4487 stream_write_tree (ob, item->value, true);
4488 }
4489 }
4490
4491 /* Read in jump function JUMP_FUNC from IB. */
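/* (The reads below must mirror the field order used by
   ipa_write_jump_function above; any change to one function has to be
   reflected in the other.)  */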
4492
4493 static void
4494 ipa_read_jump_function (struct lto_input_block *ib,
4495 struct ipa_jump_func *jump_func,
4496 struct cgraph_edge *cs,
4497 struct data_in *data_in)
4498 {
4499 enum jump_func_type jftype;
4500 enum tree_code operation;
4501 int i, count;
4502
4503 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4504 switch (jftype)
4505 {
4506 case IPA_JF_UNKNOWN:
4507 jump_func->type = IPA_JF_UNKNOWN;
4508 break;
4509 case IPA_JF_KNOWN_TYPE:
4510 {
4511 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4512 tree base_type = stream_read_tree (ib, data_in);
4513 tree component_type = stream_read_tree (ib, data_in);
4514
4515 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4516 break;
4517 }
4518 case IPA_JF_CONST:
4519 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4520 break;
4521 case IPA_JF_PASS_THROUGH:
4522 operation = (enum tree_code) streamer_read_uhwi (ib);
4523 if (operation == NOP_EXPR)
4524 {
4525 int formal_id = streamer_read_uhwi (ib);
4526 struct bitpack_d bp = streamer_read_bitpack (ib);
4527 bool agg_preserved = bp_unpack_value (&bp, 1);
4528 bool type_preserved = bp_unpack_value (&bp, 1);
4529 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4530 type_preserved);
4531 }
4532 else
4533 {
4534 tree operand = stream_read_tree (ib, data_in);
4535 int formal_id = streamer_read_uhwi (ib);
4536 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4537 operation);
4538 }
4539 break;
4540 case IPA_JF_ANCESTOR:
4541 {
4542 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4543 tree type = stream_read_tree (ib, data_in);
4544 int formal_id = streamer_read_uhwi (ib);
4545 struct bitpack_d bp = streamer_read_bitpack (ib);
4546 bool agg_preserved = bp_unpack_value (&bp, 1);
4547 bool type_preserved = bp_unpack_value (&bp, 1);
4548
4549 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4550 type_preserved);
4551 break;
4552 }
4553 }
4554
4555 count = streamer_read_uhwi (ib);
4556 vec_alloc (jump_func->agg.items, count);
4557 if (count)
4558 {
4559 struct bitpack_d bp = streamer_read_bitpack (ib);
4560 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4561 }
4562 for (i = 0; i < count; i++)
4563 {
4564 struct ipa_agg_jf_item item;
4565 item.offset = streamer_read_uhwi (ib);
4566 item.value = stream_read_tree (ib, data_in);
4567 jump_func->agg.items->quick_push (item);
4568 }
4569 }
4570
4571 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4572 relevant to indirect inlining to OB. */
4573
4574 static void
4575 ipa_write_indirect_edge_info (struct output_block *ob,
4576 struct cgraph_edge *cs)
4577 {
4578 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4579 struct bitpack_d bp;
4580
4581 streamer_write_hwi (ob, ii->param_index);
4582 streamer_write_hwi (ob, ii->offset);
4583 bp = bitpack_create (ob->main_stream);
4584 bp_pack_value (&bp, ii->polymorphic, 1);
4585 bp_pack_value (&bp, ii->agg_contents, 1);
4586 bp_pack_value (&bp, ii->member_ptr, 1);
4587 bp_pack_value (&bp, ii->by_ref, 1);
4588 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4589 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4590 streamer_write_bitpack (&bp);
4591
4592 if (ii->polymorphic)
4593 {
4594 streamer_write_hwi (ob, ii->otr_token);
4595 stream_write_tree (ob, ii->otr_type, true);
4596 stream_write_tree (ob, ii->outer_type, true);
4597 }
4598 }
4599
4600 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4601 relevant to indirect inlining from IB. */
4602
4603 static void
4604 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4605 struct data_in *data_in ATTRIBUTE_UNUSED,
4606 struct cgraph_edge *cs)
4607 {
4608 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4609 struct bitpack_d bp;
4610
4611 ii->param_index = (int) streamer_read_hwi (ib);
4612 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4613 bp = streamer_read_bitpack (ib);
4614 ii->polymorphic = bp_unpack_value (&bp, 1);
4615 ii->agg_contents = bp_unpack_value (&bp, 1);
4616 ii->member_ptr = bp_unpack_value (&bp, 1);
4617 ii->by_ref = bp_unpack_value (&bp, 1);
4618 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4619 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4620 if (ii->polymorphic)
4621 {
4622 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4623 ii->otr_type = stream_read_tree (ib, data_in);
4624 ii->outer_type = stream_read_tree (ib, data_in);
4625 }
4626 }
4627
4628 /* Stream out NODE info to OB. */
4629
4630 static void
4631 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4632 {
4633 int node_ref;
4634 lto_symtab_encoder_t encoder;
4635 struct ipa_node_params *info = IPA_NODE_REF (node);
4636 int j;
4637 struct cgraph_edge *e;
4638 struct bitpack_d bp;
4639
4640 encoder = ob->decl_state->symtab_node_encoder;
4641 node_ref = lto_symtab_encoder_encode (encoder, node);
4642 streamer_write_uhwi (ob, node_ref);
4643
4644 streamer_write_uhwi (ob, ipa_get_param_count (info));
4645 for (j = 0; j < ipa_get_param_count (info); j++)
4646 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4647 bp = bitpack_create (ob->main_stream);
4648 gcc_assert (info->analysis_done
4649 || ipa_get_param_count (info) == 0);
4650 gcc_assert (!info->node_enqueued);
4651 gcc_assert (!info->ipcp_orig_node);
4652 for (j = 0; j < ipa_get_param_count (info); j++)
4653 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4654 streamer_write_bitpack (&bp);
4655 for (j = 0; j < ipa_get_param_count (info); j++)
4656 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4657 for (e = node->callees; e; e = e->next_callee)
4658 {
4659 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4660
4661 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4662 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4663 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4664 }
4665 for (e = node->indirect_calls; e; e = e->next_callee)
4666 {
4667 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4668
4669 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4670 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4671 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4672 ipa_write_indirect_edge_info (ob, e);
4673 }
4674 }
4675
4676 /* Stream in NODE info from IB. */
4677
4678 static void
4679 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4680 struct data_in *data_in)
4681 {
4682 struct ipa_node_params *info = IPA_NODE_REF (node);
4683 int k;
4684 struct cgraph_edge *e;
4685 struct bitpack_d bp;
4686
4687 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4688
4689 for (k = 0; k < ipa_get_param_count (info); k++)
4690 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4691
4692 bp = streamer_read_bitpack (ib);
4693 if (ipa_get_param_count (info) != 0)
4694 info->analysis_done = true;
4695 info->node_enqueued = false;
4696 for (k = 0; k < ipa_get_param_count (info); k++)
4697 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4698 for (k = 0; k < ipa_get_param_count (info); k++)
4699 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4700 for (e = node->callees; e; e = e->next_callee)
4701 {
4702 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4703 int count = streamer_read_uhwi (ib);
4704
4705 if (!count)
4706 continue;
4707 vec_safe_grow_cleared (args->jump_functions, count);
4708
4709 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4710 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4711 data_in);
4712 }
4713 for (e = node->indirect_calls; e; e = e->next_callee)
4714 {
4715 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4716 int count = streamer_read_uhwi (ib);
4717
4718 if (count)
4719 {
4720 vec_safe_grow_cleared (args->jump_functions, count);
4721 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4722 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4723 data_in);
4724 }
4725 ipa_read_indirect_edge_info (ib, data_in, e);
4726 }
4727 }
4728
/* Write jump functions for all functions in the current LTO partition.  */
4730
4731 void
4732 ipa_prop_write_jump_functions (void)
4733 {
4734 struct cgraph_node *node;
4735 struct output_block *ob;
4736 unsigned int count = 0;
4737 lto_symtab_encoder_iterator lsei;
4738 lto_symtab_encoder_t encoder;
4739
4741 if (!ipa_node_params_vector.exists ())
4742 return;
4743
4744 ob = create_output_block (LTO_section_jump_functions);
4745 encoder = ob->decl_state->symtab_node_encoder;
4746 ob->cgraph_node = NULL;
4747 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4748 lsei_next_function_in_partition (&lsei))
4749 {
4750 node = lsei_cgraph_node (lsei);
4751 if (cgraph_function_with_gimple_body_p (node)
4752 && IPA_NODE_REF (node) != NULL)
4753 count++;
4754 }
4755
4756 streamer_write_uhwi (ob, count);
4757
4758 /* Process all of the functions. */
4759 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4760 lsei_next_function_in_partition (&lsei))
4761 {
4762 node = lsei_cgraph_node (lsei);
4763 if (cgraph_function_with_gimple_body_p (node)
4764 && IPA_NODE_REF (node) != NULL)
4765 ipa_write_node_info (ob, node);
4766 }
4767 streamer_write_char_stream (ob->main_stream, 0);
4768 produce_asm (ob, NULL);
4769 destroy_output_block (ob);
4770 }
4771
/* Read the jump functions section in file FILE_DATA of length LEN with data
   DATA.  */
4773
4774 static void
4775 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4776 size_t len)
4777 {
4778 const struct lto_function_header *header =
4779 (const struct lto_function_header *) data;
4780 const int cfg_offset = sizeof (struct lto_function_header);
4781 const int main_offset = cfg_offset + header->cfg_size;
4782 const int string_offset = main_offset + header->main_size;
4783 struct data_in *data_in;
4784 struct lto_input_block ib_main;
4785 unsigned int i;
4786 unsigned int count;
4787
4788 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4789 header->main_size);
4790
4791 data_in =
4792 lto_data_in_create (file_data, (const char *) data + string_offset,
4793 header->string_size, vNULL);
4794 count = streamer_read_uhwi (&ib_main);
4795
4796 for (i = 0; i < count; i++)
4797 {
4798 unsigned int index;
4799 struct cgraph_node *node;
4800 lto_symtab_encoder_t encoder;
4801
4802 index = streamer_read_uhwi (&ib_main);
4803 encoder = file_data->symtab_node_encoder;
4804 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4805 gcc_assert (node->definition);
4806 ipa_read_node_info (&ib_main, node, data_in);
4807 }
4808 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4809 len);
4810 lto_data_in_delete (data_in);
4811 }
4812
/* Read IPA-CP jump functions.  */
4814
4815 void
4816 ipa_prop_read_jump_functions (void)
4817 {
4818 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4819 struct lto_file_decl_data *file_data;
4820 unsigned int j = 0;
4821
4822 ipa_check_create_node_params ();
4823 ipa_check_create_edge_args ();
4824 ipa_register_cgraph_hooks ();
4825
4826 while ((file_data = file_data_vec[j++]))
4827 {
4828 size_t len;
4829 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4830
4831 if (data)
4832 ipa_prop_read_section (file_data, data, len);
4833 }
4834 }
4835
/* After merging units, we can get mismatches in argument counts.  Decl
   merging might also have rendered parameter lists obsolete.  Compute
   called_with_variable_arg info as well.  */
4839
4840 void
4841 ipa_update_after_lto_read (void)
4842 {
4843 ipa_check_create_node_params ();
4844 ipa_check_create_edge_args ();
4845 }
4846
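/* Stream out the aggregate value replacement chain for NODE to OB.  */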
4847 void
4848 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4849 {
4850 int node_ref;
4851 unsigned int count = 0;
4852 lto_symtab_encoder_t encoder;
4853 struct ipa_agg_replacement_value *aggvals, *av;
4854
4855 aggvals = ipa_get_agg_replacements_for_node (node);
4856 encoder = ob->decl_state->symtab_node_encoder;
4857 node_ref = lto_symtab_encoder_encode (encoder, node);
4858 streamer_write_uhwi (ob, node_ref);
4859
4860 for (av = aggvals; av; av = av->next)
4861 count++;
4862 streamer_write_uhwi (ob, count);
4863
4864 for (av = aggvals; av; av = av->next)
4865 {
4866 struct bitpack_d bp;
4867
4868 streamer_write_uhwi (ob, av->offset);
4869 streamer_write_uhwi (ob, av->index);
4870 stream_write_tree (ob, av->value, true);
4871
4872 bp = bitpack_create (ob->main_stream);
4873 bp_pack_value (&bp, av->by_ref, 1);
4874 streamer_write_bitpack (&bp);
4875 }
4876 }
4877
4878 /* Stream in the aggregate value replacement chain for NODE from IB. */
4879
4880 static void
4881 read_agg_replacement_chain (struct lto_input_block *ib,
4882 struct cgraph_node *node,
4883 struct data_in *data_in)
4884 {
4885 struct ipa_agg_replacement_value *aggvals = NULL;
4886 unsigned int count, i;
4887
4888 count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
4890 {
4891 struct ipa_agg_replacement_value *av;
4892 struct bitpack_d bp;
4893
4894 av = ggc_alloc<ipa_agg_replacement_value> ();
4895 av->offset = streamer_read_uhwi (ib);
4896 av->index = streamer_read_uhwi (ib);
4897 av->value = stream_read_tree (ib, data_in);
4898 bp = streamer_read_bitpack (ib);
4899 av->by_ref = bp_unpack_value (&bp, 1);
4900 av->next = aggvals;
4901 aggvals = av;
4902 }
4903 ipa_set_node_agg_value_chain (node, aggvals);
4904 }
4905
/* Write all aggregate replacements for nodes in the current LTO
   partition.  */
4907
4908 void
4909 ipa_prop_write_all_agg_replacement (void)
4910 {
4911 struct cgraph_node *node;
4912 struct output_block *ob;
4913 unsigned int count = 0;
4914 lto_symtab_encoder_iterator lsei;
4915 lto_symtab_encoder_t encoder;
4916
4917 if (!ipa_node_agg_replacements)
4918 return;
4919
4920 ob = create_output_block (LTO_section_ipcp_transform);
4921 encoder = ob->decl_state->symtab_node_encoder;
4922 ob->cgraph_node = NULL;
4923 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4924 lsei_next_function_in_partition (&lsei))
4925 {
4926 node = lsei_cgraph_node (lsei);
4927 if (cgraph_function_with_gimple_body_p (node)
4928 && ipa_get_agg_replacements_for_node (node) != NULL)
4929 count++;
4930 }
4931
4932 streamer_write_uhwi (ob, count);
4933
4934 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4935 lsei_next_function_in_partition (&lsei))
4936 {
4937 node = lsei_cgraph_node (lsei);
4938 if (cgraph_function_with_gimple_body_p (node)
4939 && ipa_get_agg_replacements_for_node (node) != NULL)
4940 write_agg_replacement_chain (ob, node);
4941 }
4942 streamer_write_char_stream (ob->main_stream, 0);
4943 produce_asm (ob, NULL);
4944 destroy_output_block (ob);
4945 }
4946
4947 /* Read replacements section in file FILE_DATA of length LEN with data
4948 DATA. */
4949
4950 static void
4951 read_replacements_section (struct lto_file_decl_data *file_data,
4952 const char *data,
4953 size_t len)
4954 {
4955 const struct lto_function_header *header =
4956 (const struct lto_function_header *) data;
4957 const int cfg_offset = sizeof (struct lto_function_header);
4958 const int main_offset = cfg_offset + header->cfg_size;
4959 const int string_offset = main_offset + header->main_size;
4960 struct data_in *data_in;
4961 struct lto_input_block ib_main;
4962 unsigned int i;
4963 unsigned int count;
4964
4965 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4966 header->main_size);
4967
4968 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4969 header->string_size, vNULL);
4970 count = streamer_read_uhwi (&ib_main);
4971
4972 for (i = 0; i < count; i++)
4973 {
4974 unsigned int index;
4975 struct cgraph_node *node;
4976 lto_symtab_encoder_t encoder;
4977
4978 index = streamer_read_uhwi (&ib_main);
4979 encoder = file_data->symtab_node_encoder;
4980 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4981 gcc_assert (node->definition);
4982 read_agg_replacement_chain (&ib_main, node, data_in);
4983 }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
			 len);
4986 lto_data_in_delete (data_in);
4987 }
4988
4989 /* Read IPA-CP aggregate replacements. */
4990
4991 void
4992 ipa_prop_read_all_agg_replacement (void)
4993 {
4994 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4995 struct lto_file_decl_data *file_data;
4996 unsigned int j = 0;
4997
4998 while ((file_data = file_data_vec[j++]))
4999 {
5000 size_t len;
5001 const char *data = lto_get_section_data (file_data,
5002 LTO_section_ipcp_transform,
5003 NULL, &len);
5004 if (data)
5005 read_replacements_section (file_data, data, len);
5006 }
5007 }
5008
5009 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5010 NODE. */
5011
5012 static void
5013 adjust_agg_replacement_values (struct cgraph_node *node,
5014 struct ipa_agg_replacement_value *aggval)
5015 {
5016 struct ipa_agg_replacement_value *v;
5017 int i, c = 0, d = 0, *adj;
5018
5019 if (!node->clone.combined_args_to_skip)
5020 return;
5021
5022 for (v = aggval; v; v = v->next)
5023 {
5024 gcc_assert (v->index >= 0);
5025 if (c < v->index)
5026 c = v->index;
5027 }
5028 c++;
5029
5030 adj = XALLOCAVEC (int, c);
5031 for (i = 0; i < c; i++)
5032 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5033 {
5034 adj[i] = -1;
5035 d++;
5036 }
5037 else
5038 adj[i] = i - d;
5039
5040 for (v = aggval; v; v = v->next)
5041 v->index = adj[v->index];
5042 }
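
/* (Illustration: if the replacements refer to parameters 0 through 3 and
   combined_args_to_skip contains just bit 1, ADJ becomes {0, -1, 1, 2}, so
   a replacement for original parameter 2 is renumbered to parameter 1 of
   the clone.)  */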
5043
5044 /* Dominator walker driving the ipcp modification phase. */
5045
5046 class ipcp_modif_dom_walker : public dom_walker
5047 {
5048 public:
5049 ipcp_modif_dom_walker (struct func_body_info *fbi,
5050 vec<ipa_param_descriptor> descs,
5051 struct ipa_agg_replacement_value *av,
5052 bool *sc, bool *cc)
5053 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5054 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5055
5056 virtual void before_dom_children (basic_block);
5057
5058 private:
5059 struct func_body_info *m_fbi;
5060 vec<ipa_param_descriptor> m_descriptors;
5061 struct ipa_agg_replacement_value *m_aggval;
5062 bool *m_something_changed, *m_cfg_changed;
5063 };
5064
5065 void
5066 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5067 {
5068 gimple_stmt_iterator gsi;
5069 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5070 {
5071 struct ipa_agg_replacement_value *v;
5072 gimple stmt = gsi_stmt (gsi);
5073 tree rhs, val, t;
5074 HOST_WIDE_INT offset, size;
5075 int index;
5076 bool by_ref, vce;
5077
5078 if (!gimple_assign_load_p (stmt))
5079 continue;
5080 rhs = gimple_assign_rhs1 (stmt);
5081 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5082 continue;
5083
5084 vce = false;
5085 t = rhs;
5086 while (handled_component_p (t))
5087 {
          /* V_C_E can do things like convert an array of integers to one
             bigger integer and similar things we do not handle below.  */
          if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5091 {
5092 vce = true;
5093 break;
5094 }
5095 t = TREE_OPERAND (t, 0);
5096 }
5097 if (vce)
5098 continue;
5099
5100 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5101 &offset, &size, &by_ref))
5102 continue;
5103 for (v = m_aggval; v; v = v->next)
5104 if (v->index == index
5105 && v->offset == offset)
5106 break;
5107 if (!v
5108 || v->by_ref != by_ref
5109 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5110 continue;
5111
5112 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5113 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5114 {
5115 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5116 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5117 else if (TYPE_SIZE (TREE_TYPE (rhs))
5118 == TYPE_SIZE (TREE_TYPE (v->value)))
5119 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5120 else
5121 {
5122 if (dump_file)
5123 {
5124 fprintf (dump_file, " const ");
5125 print_generic_expr (dump_file, v->value, 0);
5126 fprintf (dump_file, " can't be converted to type of ");
5127 print_generic_expr (dump_file, rhs, 0);
5128 fprintf (dump_file, "\n");
5129 }
5130 continue;
5131 }
5132 }
5133 else
5134 val = v->value;
5135
5136 if (dump_file && (dump_flags & TDF_DETAILS))
5137 {
5138 fprintf (dump_file, "Modifying stmt:\n ");
5139 print_gimple_stmt (dump_file, stmt, 0, 0);
5140 }
5141 gimple_assign_set_rhs_from_tree (&gsi, val);
5142 update_stmt (stmt);
5143
5144 if (dump_file && (dump_flags & TDF_DETAILS))
5145 {
5146 fprintf (dump_file, "into:\n ");
5147 print_gimple_stmt (dump_file, stmt, 0, 0);
5148 fprintf (dump_file, "\n");
5149 }
5150
5151 *m_something_changed = true;
5152 if (maybe_clean_eh_stmt (stmt)
5153 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5154 *m_cfg_changed = true;
5155 }
}
5158
5159 /* IPCP transformation phase doing propagation of aggregate values. */
5160
5161 unsigned int
5162 ipcp_transform_function (struct cgraph_node *node)
5163 {
5164 vec<ipa_param_descriptor> descriptors = vNULL;
5165 struct func_body_info fbi;
5166 struct ipa_agg_replacement_value *aggval;
5167 int param_count;
5168 bool cfg_changed = false, something_changed = false;
5169
5170 gcc_checking_assert (cfun);
5171 gcc_checking_assert (current_function_decl);
5172
5173 if (dump_file)
5174 fprintf (dump_file, "Modification phase of node %s/%i\n",
5175 node->name (), node->order);
5176
5177 aggval = ipa_get_agg_replacements_for_node (node);
5178 if (!aggval)
5179 return 0;
5180 param_count = count_formal_params (node->decl);
5181 if (param_count == 0)
5182 return 0;
5183 adjust_agg_replacement_values (node, aggval);
5184 if (dump_file)
5185 ipa_dump_agg_replacement_values (dump_file, aggval);
5186
5187 fbi.node = node;
5188 fbi.info = NULL;
5189 fbi.bb_infos = vNULL;
5190 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5191 fbi.param_count = param_count;
5192 fbi.aa_walked = 0;
5193
5194 descriptors.safe_grow_cleared (param_count);
5195 ipa_populate_param_decls (node, descriptors);
5196 calculate_dominance_info (CDI_DOMINATORS);
5197 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5198 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5199
5200 int i;
5201 struct ipa_bb_info *bi;
5202 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5203 free_ipa_bb_info (bi);
5204 fbi.bb_infos.release ();
5205 free_dominance_info (CDI_DOMINATORS);
5206 (*ipa_node_agg_replacements)[node->uid] = NULL;
5207 descriptors.release ();
5208
5209 if (!something_changed)
5210 return 0;
5211 else if (cfg_changed)
5212 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5213 else
5214 return TODO_update_ssa_only_virtuals;
5215 }