PR jit/63854: Fix leak of ipa hooks
[gcc.git] / gcc / ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
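
/* For illustration, a hypothetical CFG fragment: if a store through an
   aliasing pointer is found in BB2, PARM_MODIFIED is set in BB2's status
   and every block dominated by BB2 (here BB3 and BB4) can reuse that
   answer via find_dominating_aa_status instead of walking the virtual
   definitions again:

       BB1  (parameter so far unmodified)
        |
       BB2  *p = ...;   <- parm_modified set here
       / \
     BB3   BB4          <- both inherit BB2's status.  */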

/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
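
/* For example, a function carrying a hypothetical per-function attribute
   such as

     __attribute__ ((optimize ("O0")))
     void
     debug_helper (int *p)
     {
       ...
     }

   has DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize == 0, so the
   function above returns true and IPA-CP leaves it alone.  The same
   happens with optimize ("no-ipa-cp").  */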

/* Return index of the formal whose tree is PTREE in the function whose
   formal parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the declaration of the Ith formal parameter of the function
   corresponding to INFO into FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }

      struct ipa_polymorphic_call_context *ctx
        = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
        {
          fprintf (f, "         Context: ");
          ctx->dump (dump_file);
        }

      if (jump_func->alignment.known)
        {
          fprintf (f, "         Alignment: %u, misalignment: %u\n",
                   jump_func->alignment.align,
                   jump_func->alignment.misalign);
        }
      else
        fprintf (f, "         Unknown alignment\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function recording that we really know nothing
   about the actual argument (IPA_JF_UNKNOWN).  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set JFUNC to be a copy of another jump function (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                        sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
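
/* As an illustration, in a hypothetical caller

     extern void callback_user (void (*) (int));
     static void my_cb (int i);
     ...
     callback_user (my_cb);

   the argument is an ADDR_EXPR of a FUNCTION_DECL, so the setter above
   produces an IPA_JF_CONST jump function with an attached ipa_cst_ref_desc
   whose refcount tracks how many IPA structures still describe the
   reference to my_cb.  */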

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
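
/* To illustrate the three setters above on a single hypothetical call
   site:

     struct B { int pad; struct A base; };
     extern void bar (int, int, struct A *);

     void
     foo (int x, struct B *b)
     {
       bar (x, x + 3, &b->base);
     }

   argument 0 gets a simple pass-through jump function (formal 0),
   argument 1 an arithmetic pass-through (formal 0, PLUS_EXPR, operand 3)
   and argument 2 an ancestor jump function (formal 1, with the bit offset
   of the base field within struct B).  */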

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
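
/* For instance, a C++ constructor conforming to the assumptions spelled
   out above might compile to something like the hypothetical sketch

     B::B (struct B *this)
     {
       A::A (this);                   // 1) construct ancestor sub-objects
       this->_vptr.B = &_ZTV1B + 16;  // 2) store the new VMT pointer
       this->member = 42;             // 3) user code, no VMT stores

   and, walking backwards from section 3, only the store in section 2
   makes stmt_may_be_vtbl_ptr_store return true: the call is rejected by
   the is_gimple_call test and the ordinary member store by the
   COMPONENT_REF/DECL_VIRTUAL_P test.  */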

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also, as a special case, constructors and
   destructors may change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes to the dynamic type;
     those require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}
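
/* For example, if a destructor has been inlined into FUNCTION, a call
   inside the inlined body, sketched hypothetically as

     void
     fun (struct polygon *p)
     {
       // inlined from polygon::~polygon (p):
       p->log_destruction ();    // CALL under analysis

   sits in a BLOCK whose abstract origin is the destructor, so the walk
   over BLOCK_SUPERCONTEXTs above answers that the dynamic type of *p may
   legitimately change around CALL.  */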

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gcall *call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true and fill in the jump function JFUNC with relevant
   type information or set it to unknown.  ARG is the object itself (not a
   pointer to it, unless dereferenced).  BASE is the base of the memory access
   as returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Records the fact that it has been invoked
   into the boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far, which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (but the
   former can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   the offset within the aggregate and whether it is a load from a value passed
   by reference, respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the func_body_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters as they are accessed
   in different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gcall *call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
                         parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
                       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
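
/* The shape that type_like_member_ptr_p accepts is the common C++ ABI
   representation of pointers to member functions, i.e. a record like

     struct
     {
       void (S::*__pfn) ();   // pointer to METHOD_TYPE, the pfn field
       ptrdiff_t __delta;     // this-adjustment, the delta field
     };

   (the field names here are illustrative; only the structure is
   checked above).  */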

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or unless such
   an element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}

/* Build an aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
                                         tree arg_type,
                                         struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              check_ref = false;
              ao_ref_init (&r, arg_base);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* The second stage walks back the BB, looks at individual statements and as
     long as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size)
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
                                          &already_there);
      if (!p)
        break;
      if (already_there)
        continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* The third stage just goes over the list and creates an appropriate vector
     of ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
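
/* A minimal sketch of the situation the function above recognizes:

     struct S s;             // or: struct S *s = ...; for the by_ref case
     s.a = 1;
     s.b = 2;
     consume (&s);           // hypothetical callee taking struct S *

   Walking backwards from the call, the two stores are recorded as
   (offset, size, constant) entries and become the aggregate part of the
   jump function, so IPA-CP in the callee can assume the pointed-to
   aggregate arrives with a == 1 and b == 2.  */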

/* Return the declared type of the Ith formal parameter of the callee of call
   graph edge E, as far as it can be determined from the callee's function
   type or its DECL_ARGUMENTS, or NULL.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
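
/* E.g. for a hypothetical call to an unprototyped callee

     void old_style ();
     ...
     old_style (i, j);

   TYPE_ARG_TYPES yields nothing useful, so the function above falls back
   to the callee's DECL_ARGUMENTS, and may still return NULL when even
   those are not available (e.g. for variadic arguments).  */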
1693
1694 /* Compute jump function for all arguments of callsite CS and insert the
1695 information in the jump_functions array in the ipa_edge_args corresponding
1696 to this callsite. */
1697
1698 static void
1699 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1700 struct cgraph_edge *cs)
1701 {
1702 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1703 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1704 gcall *call = cs->call_stmt;
1705 int n, arg_num = gimple_call_num_args (call);
1706 bool useful_context = false;
1707
1708 if (arg_num == 0 || args->jump_functions)
1709 return;
1710 vec_safe_grow_cleared (args->jump_functions, arg_num);
1711 if (flag_devirtualize)
1712 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1713
1714 if (gimple_call_internal_p (call))
1715 return;
1716 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1717 return;
1718
1719 for (n = 0; n < arg_num; n++)
1720 {
1721 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1722 tree arg = gimple_call_arg (call, n);
1723 tree param_type = ipa_get_callee_param_type (cs, n);
1724 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1725 {
1726 tree instance;
1727 struct ipa_polymorphic_call_context context (cs->caller->decl,
1728 arg, cs->call_stmt,
1729 &instance);
1730 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1731 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1732 if (!context.useless_p ())
1733 useful_context = true;
1734 }
1735
1736 if (POINTER_TYPE_P (TREE_TYPE(arg)))
1737 {
1738 unsigned HOST_WIDE_INT hwi_bitpos;
1739 unsigned align;
1740
1741 if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
1742 && align > BITS_PER_UNIT)
1743 {
1744 jfunc->alignment.known = true;
1745 jfunc->alignment.align = align;
1746 jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
1747 }
1748 else
1749 gcc_assert (!jfunc->alignment.known);
1750 }
1751 else
1752 gcc_assert (!jfunc->alignment.known);
1753
1754 if (is_gimple_ip_invariant (arg))
1755 ipa_set_jf_constant (jfunc, arg, cs);
1756 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1757 && TREE_CODE (arg) == PARM_DECL)
1758 {
1759 int index = ipa_get_param_decl_index (info, arg);
1760
1761 gcc_assert (index >=0);
1762 /* Aggregate passed by value, check for pass-through, otherwise we
1763 will attempt to fill in aggregate contents later in this
1764 for cycle. */
1765 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1766 {
1767 ipa_set_jf_simple_pass_through (jfunc, index, false);
1768 continue;
1769 }
1770 }
1771 else if (TREE_CODE (arg) == SSA_NAME)
1772 {
1773 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1774 {
1775 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1776 if (index >= 0)
1777 {
1778 bool agg_p;
1779 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1780 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1781 }
1782 }
1783 else
1784 {
1785 gimple stmt = SSA_NAME_DEF_STMT (arg);
1786 if (is_gimple_assign (stmt))
1787 compute_complex_assign_jump_func (fbi, info, jfunc,
1788 call, stmt, arg, param_type);
1789 else if (gimple_code (stmt) == GIMPLE_PHI)
1790 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1791 call,
1792 as_a <gphi *> (stmt));
1793 }
1794 }
1795
1796 /* If ARG is a pointer, we cannot use its type to determine the type of the
1797 aggregate passed (because type conversions are ignored in gimple). Usually
1798 we can safely get the type from the function declaration, but in the case of
1799 K&R prototypes or variadic functions we can try our luck with the type of
1800 the pointer passed. TODO: Since we look for actual initialization of the
1801 memory object, we may do better to work out the type from the stores we find. */
1802 if (!param_type)
1803 param_type = TREE_TYPE (arg);
1804
1805 if ((jfunc->type != IPA_JF_PASS_THROUGH
1806 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1807 && (jfunc->type != IPA_JF_ANCESTOR
1808 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1809 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1810 || POINTER_TYPE_P (param_type)))
1811 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1812 }
1813 if (!useful_context)
1814 vec_free (args->polymorphic_call_contexts);
1815 }
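/* To make the classification above concrete, here is an informal sketch, not
   taken from any particular testcase: for a caller such as

     int caller (int x)
     {
       return callee (x, 7);
     }

   the first argument is the default-definition SSA name of parameter X and so
   receives a simple pass-through jump function, while the constant 7 is a
   gimple IP invariant and receives a constant jump function.  */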
1816
1817 /* Compute jump functions for all edges - both direct and indirect - outgoing
1818 from BB. */
1819
1820 static void
1821 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1822 {
1823 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1824 int i;
1825 struct cgraph_edge *cs;
1826
1827 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1828 {
1829 struct cgraph_node *callee = cs->callee;
1830
1831 if (callee)
1832 {
1833 callee->ultimate_alias_target ();
1834 /* We do not need to bother analyzing calls to unknown functions
1835 unless they may become known during lto/whopr. */
1836 if (!callee->definition && !flag_lto)
1837 continue;
1838 }
1839 ipa_compute_jump_functions_for_edge (fbi, cs);
1840 }
1841 }
1842
1843 /* If STMT looks like a statement loading a value from a member pointer formal
1844 parameter, return that parameter and store the offset of the field to
1845 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1846 might be clobbered). If USE_DELTA, then we look for a use of the delta
1847 field rather than the pfn. */
1848
1849 static tree
1850 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1851 HOST_WIDE_INT *offset_p)
1852 {
1853 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1854
1855 if (!gimple_assign_single_p (stmt))
1856 return NULL_TREE;
1857
1858 rhs = gimple_assign_rhs1 (stmt);
1859 if (TREE_CODE (rhs) == COMPONENT_REF)
1860 {
1861 ref_field = TREE_OPERAND (rhs, 1);
1862 rhs = TREE_OPERAND (rhs, 0);
1863 }
1864 else
1865 ref_field = NULL_TREE;
1866 if (TREE_CODE (rhs) != MEM_REF)
1867 return NULL_TREE;
1868 rec = TREE_OPERAND (rhs, 0);
1869 if (TREE_CODE (rec) != ADDR_EXPR)
1870 return NULL_TREE;
1871 rec = TREE_OPERAND (rec, 0);
1872 if (TREE_CODE (rec) != PARM_DECL
1873 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1874 return NULL_TREE;
1875 ref_offset = TREE_OPERAND (rhs, 1);
1876
1877 if (use_delta)
1878 fld = delta_field;
1879 else
1880 fld = ptr_field;
1881 if (offset_p)
1882 *offset_p = int_bit_position (fld);
1883
1884 if (ref_field)
1885 {
1886 if (integer_nonzerop (ref_offset))
1887 return NULL_TREE;
1888 return ref_field == fld ? rec : NULL_TREE;
1889 }
1890 else
1891 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1892 : NULL_TREE;
1893 }
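/* An illustrative example: if F is a member pointer parameter whose __pfn
   field lives at byte offset 0, then the load

     f$__pfn_24 = MEM[(struct *)&f];

   matches with USE_DELTA false; the function returns the PARM_DECL of F and
   stores the bit position of __pfn in *OFFSET_P.  */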
1894
1895 /* Returns true iff T is an SSA_NAME defined by a statement. */
1896
1897 static bool
1898 ipa_is_ssa_with_stmt_def (tree t)
1899 {
1900 if (TREE_CODE (t) == SSA_NAME
1901 && !SSA_NAME_IS_DEFAULT_DEF (t))
1902 return true;
1903 else
1904 return false;
1905 }
1906
1907 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1908 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1909 indirect call graph edge. */
1910
1911 static struct cgraph_edge *
1912 ipa_note_param_call (struct cgraph_node *node, int param_index,
1913 gcall *stmt)
1914 {
1915 struct cgraph_edge *cs;
1916
1917 cs = node->get_edge (stmt);
1918 cs->indirect_info->param_index = param_index;
1919 cs->indirect_info->agg_contents = 0;
1920 cs->indirect_info->member_ptr = 0;
1921 return cs;
1922 }
1923
1924 /* Analyze the CALL with target TARGET and examine uses of formal
1925 parameters of the caller FBI->node (described by FBI->info).
1926 Currently it checks whether the call calls a pointer that is a formal
1927 parameter and if so, the parameter is marked with the called flag and
1928 an indirect call graph edge describing the call is created. This is
1929 very simple for ordinary pointers represented in SSA but not so nice
1930 when it comes to member pointers. The ugly part of this function does
1931 nothing more than try to match the pattern of such a call. An example
1932 of such a pattern is the gimple dump below; the call is on the last
1933 line:
1934
1935 <bb 2>:
1936 f$__delta_5 = f.__delta;
1937 f$__pfn_24 = f.__pfn;
1938
1939 or
1940 <bb 2>:
1941 f$__delta_5 = MEM[(struct *)&f];
1942 f$__pfn_24 = MEM[(struct *)&f + 4B];
1943
1944 and a few lines below:
1945
1946 <bb 5>
1947 D.2496_3 = (int) f$__pfn_24;
1948 D.2497_4 = D.2496_3 & 1;
1949 if (D.2497_4 != 0)
1950 goto <bb 3>;
1951 else
1952 goto <bb 4>;
1953
1954 <bb 6>:
1955 D.2500_7 = (unsigned int) f$__delta_5;
1956 D.2501_8 = &S + D.2500_7;
1957 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1958 D.2503_10 = *D.2502_9;
1959 D.2504_12 = f$__pfn_24 + -1;
1960 D.2505_13 = (unsigned int) D.2504_12;
1961 D.2506_14 = D.2503_10 + D.2505_13;
1962 D.2507_15 = *D.2506_14;
1963 iftmp.11_16 = (String:: *) D.2507_15;
1964
1965 <bb 7>:
1966 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1967 D.2500_19 = (unsigned int) f$__delta_5;
1968 D.2508_20 = &S + D.2500_19;
1969 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1970
1971 Such patterns are results of simple calls to a member pointer:
1972
1973 int doprinting (int (MyString::* f)(int) const)
1974 {
1975 MyString S ("somestring");
1976
1977 return (S.*f)(4);
1978 }
1979
1980 Moreover, the function also looks for called pointers loaded from aggregates
1981 passed by value or reference. */
1982
1983 static void
1984 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
1985 tree target)
1986 {
1987 struct ipa_node_params *info = fbi->info;
1988 HOST_WIDE_INT offset;
1989 bool by_ref;
1990
1991 if (SSA_NAME_IS_DEFAULT_DEF (target))
1992 {
1993 tree var = SSA_NAME_VAR (target);
1994 int index = ipa_get_param_decl_index (info, var);
1995 if (index >= 0)
1996 ipa_note_param_call (fbi->node, index, call);
1997 return;
1998 }
1999
2000 int index;
2001 gimple def = SSA_NAME_DEF_STMT (target);
2002 if (gimple_assign_single_p (def)
2003 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2004 gimple_assign_rhs1 (def), &index, &offset,
2005 NULL, &by_ref))
2006 {
2007 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2008 cs->indirect_info->offset = offset;
2009 cs->indirect_info->agg_contents = 1;
2010 cs->indirect_info->by_ref = by_ref;
2011 return;
2012 }
2013
2014 /* Now we need to try to match the complex pattern of calling a member
2015 pointer. */
2016 if (gimple_code (def) != GIMPLE_PHI
2017 || gimple_phi_num_args (def) != 2
2018 || !POINTER_TYPE_P (TREE_TYPE (target))
2019 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2020 return;
2021
2022 /* First, we need to check whether one of these is a load from a member
2023 pointer that is a parameter to this function. */
2024 tree n1 = PHI_ARG_DEF (def, 0);
2025 tree n2 = PHI_ARG_DEF (def, 1);
2026 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2027 return;
2028 gimple d1 = SSA_NAME_DEF_STMT (n1);
2029 gimple d2 = SSA_NAME_DEF_STMT (n2);
2030
2031 tree rec;
2032 basic_block bb, virt_bb;
2033 basic_block join = gimple_bb (def);
2034 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2035 {
2036 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2037 return;
2038
2039 bb = EDGE_PRED (join, 0)->src;
2040 virt_bb = gimple_bb (d2);
2041 }
2042 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2043 {
2044 bb = EDGE_PRED (join, 1)->src;
2045 virt_bb = gimple_bb (d1);
2046 }
2047 else
2048 return;
2049
2050 /* Second, we need to check that the basic blocks are laid out in the way
2051 corresponding to the pattern. */
2052
2053 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2054 || single_pred (virt_bb) != bb
2055 || single_succ (virt_bb) != join)
2056 return;
2057
2058 /* Third, let's see that the branching is done depending on the least
2059 significant bit of the pfn. */
2060
2061 gimple branch = last_stmt (bb);
2062 if (!branch || gimple_code (branch) != GIMPLE_COND)
2063 return;
2064
2065 if ((gimple_cond_code (branch) != NE_EXPR
2066 && gimple_cond_code (branch) != EQ_EXPR)
2067 || !integer_zerop (gimple_cond_rhs (branch)))
2068 return;
2069
2070 tree cond = gimple_cond_lhs (branch);
2071 if (!ipa_is_ssa_with_stmt_def (cond))
2072 return;
2073
2074 def = SSA_NAME_DEF_STMT (cond);
2075 if (!is_gimple_assign (def)
2076 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2077 || !integer_onep (gimple_assign_rhs2 (def)))
2078 return;
2079
2080 cond = gimple_assign_rhs1 (def);
2081 if (!ipa_is_ssa_with_stmt_def (cond))
2082 return;
2083
2084 def = SSA_NAME_DEF_STMT (cond);
2085
2086 if (is_gimple_assign (def)
2087 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2088 {
2089 cond = gimple_assign_rhs1 (def);
2090 if (!ipa_is_ssa_with_stmt_def (cond))
2091 return;
2092 def = SSA_NAME_DEF_STMT (cond);
2093 }
2094
2095 tree rec2;
2096 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2097 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2098 == ptrmemfunc_vbit_in_delta),
2099 NULL);
2100 if (rec != rec2)
2101 return;
2102
2103 index = ipa_get_param_decl_index (info, rec);
2104 if (index >= 0
2105 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2106 {
2107 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2108 cs->indirect_info->offset = offset;
2109 cs->indirect_info->agg_contents = 1;
2110 cs->indirect_info->member_ptr = 1;
2111 }
2112
2113 return;
2114 }
2115
2116 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2117 object referenced in the expression is a formal parameter of the caller
2118 FBI->node (described by FBI->info), create a call note for the
2119 statement. */
2120
2121 static void
2122 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2123 gcall *call, tree target)
2124 {
2125 tree obj = OBJ_TYPE_REF_OBJECT (target);
2126 int index;
2127 HOST_WIDE_INT anc_offset;
2128
2129 if (!flag_devirtualize)
2130 return;
2131
2132 if (TREE_CODE (obj) != SSA_NAME)
2133 return;
2134
2135 struct ipa_node_params *info = fbi->info;
2136 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2137 {
2138 struct ipa_jump_func jfunc;
2139 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2140 return;
2141
2142 anc_offset = 0;
2143 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2144 gcc_assert (index >= 0);
2145 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2146 call, &jfunc))
2147 return;
2148 }
2149 else
2150 {
2151 struct ipa_jump_func jfunc;
2152 gimple stmt = SSA_NAME_DEF_STMT (obj);
2153 tree expr;
2154
2155 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2156 if (!expr)
2157 return;
2158 index = ipa_get_param_decl_index (info,
2159 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2160 gcc_assert (index >= 0);
2161 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2162 call, &jfunc, anc_offset))
2163 return;
2164 }
2165
2166 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2167 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2168 ii->offset = anc_offset;
2169 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2170 ii->otr_type = obj_type_ref_class (target);
2171 ii->polymorphic = 1;
2172 }
2173
2174 /* Analyze whether and how the call statement CALL utilizes formal parameters
2175 of the caller. FBI describes the function body of the caller and holds
2176 intermediate information about each formal parameter. */
2177
2178 static void
2179 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2180 {
2181 tree target = gimple_call_fn (call);
2182
2183 if (!target
2184 || (TREE_CODE (target) != SSA_NAME
2185 && !virtual_method_call_p (target)))
2186 return;
2187
2188 struct cgraph_edge *cs = fbi->node->get_edge (call);
2189 /* If we previously turned the call into a direct call, there is
2190 no need to analyze. */
2191 if (cs && !cs->indirect_unknown_callee)
2192 return;
2193
2194 if (cs->indirect_info->polymorphic && flag_devirtualize)
2195 {
2196 tree instance;
2197 tree target = gimple_call_fn (call);
2198 ipa_polymorphic_call_context context (current_function_decl,
2199 target, call, &instance);
2200
2201 gcc_checking_assert (cs->indirect_info->otr_type
2202 == obj_type_ref_class (target));
2203 gcc_checking_assert (cs->indirect_info->otr_token
2204 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2205
2206 cs->indirect_info->vptr_changed
2207 = !context.get_dynamic_type (instance,
2208 OBJ_TYPE_REF_OBJECT (target),
2209 obj_type_ref_class (target), call);
2210 cs->indirect_info->context = context;
2211 }
2212
2213 if (TREE_CODE (target) == SSA_NAME)
2214 ipa_analyze_indirect_call_uses (fbi, call, target);
2215 else if (virtual_method_call_p (target))
2216 ipa_analyze_virtual_call_uses (fbi, call, target);
2217 }
2218
2219
2220 /* Analyze the call statement STMT with respect to formal parameters (described
2221 in FBI->info) of the caller given by FBI->node. Currently it only checks
2222 whether formal parameters are called. */
2223
2224 static void
2225 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2226 {
2227 if (is_gimple_call (stmt))
2228 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2229 }
2230
2231 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2232 If OP is a parameter declaration, mark it as used in the info structure
2233 passed in DATA. */
2234
2235 static bool
2236 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2237 {
2238 struct ipa_node_params *info = (struct ipa_node_params *) data;
2239
2240 op = get_base_address (op);
2241 if (op
2242 && TREE_CODE (op) == PARM_DECL)
2243 {
2244 int index = ipa_get_param_decl_index (info, op);
2245 gcc_assert (index >= 0);
2246 ipa_set_param_used (info, index, true);
2247 }
2248
2249 return false;
2250 }
2251
2252 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2253 the findings in various structures of the associated ipa_node_params
2254 structure, such as parameter flags, notes etc. FBI holds various data about
2255 the function being analyzed. */
2256
2257 static void
2258 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2259 {
2260 gimple_stmt_iterator gsi;
2261 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2262 {
2263 gimple stmt = gsi_stmt (gsi);
2264
2265 if (is_gimple_debug (stmt))
2266 continue;
2267
2268 ipa_analyze_stmt_uses (fbi, stmt);
2269 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2270 visit_ref_for_mod_analysis,
2271 visit_ref_for_mod_analysis,
2272 visit_ref_for_mod_analysis);
2273 }
2274 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2275 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2276 visit_ref_for_mod_analysis,
2277 visit_ref_for_mod_analysis,
2278 visit_ref_for_mod_analysis);
2279 }
2280
2281 /* Calculate controlled uses of parameters of NODE. */
2282
2283 static void
2284 ipa_analyze_controlled_uses (struct cgraph_node *node)
2285 {
2286 struct ipa_node_params *info = IPA_NODE_REF (node);
2287
2288 for (int i = 0; i < ipa_get_param_count (info); i++)
2289 {
2290 tree parm = ipa_get_param (info, i);
2291 int controlled_uses = 0;
2292
2293 /* For SSA regs see if the parameter is used. For non-SSA parameters
2294 we compute the flag during modification analysis. */
2295 if (is_gimple_reg (parm))
2296 {
2297 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2298 parm);
2299 if (ddef && !has_zero_uses (ddef))
2300 {
2301 imm_use_iterator imm_iter;
2302 use_operand_p use_p;
2303
2304 ipa_set_param_used (info, i, true);
2305 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2306 if (!is_gimple_call (USE_STMT (use_p)))
2307 {
2308 if (!is_gimple_debug (USE_STMT (use_p)))
2309 {
2310 controlled_uses = IPA_UNDESCRIBED_USE;
2311 break;
2312 }
2313 }
2314 else
2315 controlled_uses++;
2316 }
2317 else
2318 controlled_uses = 0;
2319 }
2320 else
2321 controlled_uses = IPA_UNDESCRIBED_USE;
2322 ipa_set_controlled_uses (info, i, controlled_uses);
2323 }
2324 }
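/* A concrete illustration, not from any particular testcase: a parameter
   whose SSA default definition is used only as an argument of two calls ends
   up with controlled_uses == 2, whereas one additional non-debug use in, say,
   an addition makes it IPA_UNDESCRIBED_USE, because we can no longer account
   for every use.  */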
2325
2326 /* Free stuff in BI. */
2327
2328 static void
2329 free_ipa_bb_info (struct ipa_bb_info *bi)
2330 {
2331 bi->cg_edges.release ();
2332 bi->param_aa_statuses.release ();
2333 }
2334
2335 /* Dominator walker driving the analysis. */
2336
2337 class analysis_dom_walker : public dom_walker
2338 {
2339 public:
2340 analysis_dom_walker (struct func_body_info *fbi)
2341 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2342
2343 virtual void before_dom_children (basic_block);
2344
2345 private:
2346 struct func_body_info *m_fbi;
2347 };
2348
2349 void
2350 analysis_dom_walker::before_dom_children (basic_block bb)
2351 {
2352 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2353 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2354 }
2355
2356 /* Initialize the array describing properties of formal parameters
2357 of NODE, analyze their uses and compute jump functions associated
2358 with actual arguments of calls from within NODE. */
2359
2360 void
2361 ipa_analyze_node (struct cgraph_node *node)
2362 {
2363 struct func_body_info fbi;
2364 struct ipa_node_params *info;
2365
2366 ipa_check_create_node_params ();
2367 ipa_check_create_edge_args ();
2368 info = IPA_NODE_REF (node);
2369
2370 if (info->analysis_done)
2371 return;
2372 info->analysis_done = 1;
2373
2374 if (ipa_func_spec_opts_forbid_analysis_p (node))
2375 {
2376 for (int i = 0; i < ipa_get_param_count (info); i++)
2377 {
2378 ipa_set_param_used (info, i, true);
2379 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2380 }
2381 return;
2382 }
2383
2384 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2385 push_cfun (func);
2386 calculate_dominance_info (CDI_DOMINATORS);
2387 ipa_initialize_node_params (node);
2388 ipa_analyze_controlled_uses (node);
2389
2390 fbi.node = node;
2391 fbi.info = IPA_NODE_REF (node);
2392 fbi.bb_infos = vNULL;
2393 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2394 fbi.param_count = ipa_get_param_count (info);
2395 fbi.aa_walked = 0;
2396
2397 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2398 {
2399 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2400 bi->cg_edges.safe_push (cs);
2401 }
2402
2403 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2404 {
2405 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2406 bi->cg_edges.safe_push (cs);
2407 }
2408
2409 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2410
2411 int i;
2412 struct ipa_bb_info *bi;
2413 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2414 free_ipa_bb_info (bi);
2415 fbi.bb_infos.release ();
2416 free_dominance_info (CDI_DOMINATORS);
2417 pop_cfun ();
2418 }
2419
2420 /* Update the jump functions associated with call graph edge E when the call
2421 graph edge CS is being inlined, assuming that E->caller is already (possibly
2422 indirectly) inlined into CS->callee and that E has not been inlined. */
2423
2424 static void
2425 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2426 struct cgraph_edge *e)
2427 {
2428 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2429 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2430 int count = ipa_get_cs_argument_count (args);
2431 int i;
2432
2433 for (i = 0; i < count; i++)
2434 {
2435 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2436 struct ipa_polymorphic_call_context *dst_ctx
2437 = ipa_get_ith_polymorhic_call_context (args, i);
2438
2439 if (dst->type == IPA_JF_ANCESTOR)
2440 {
2441 struct ipa_jump_func *src;
2442 int dst_fid = dst->value.ancestor.formal_id;
2443 struct ipa_polymorphic_call_context *src_ctx
2444 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2445
2446 /* A variable number of arguments can cause havoc if we try to access
2447 an argument that does not exist on the inlined edge. So make sure
2448 we don't. */
2449 if (dst_fid >= ipa_get_cs_argument_count (top))
2450 {
2451 ipa_set_jf_unknown (dst);
2452 continue;
2453 }
2454
2455 src = ipa_get_ith_jump_func (top, dst_fid);
2456
2457 if (src_ctx && !src_ctx->useless_p ())
2458 {
2459 struct ipa_polymorphic_call_context ctx = *src_ctx;
2460
2461 /* TODO: Make type preserved safe WRT contexts. */
2462 if (!ipa_get_jf_ancestor_type_preserved (dst))
2463 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2464 ctx.offset_by (dst->value.ancestor.offset);
2465 if (!ctx.useless_p ())
2466 {
2467 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2468 count);
2469 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2470 }
2471 dst_ctx->combine_with (ctx);
2472 }
2473
2474 if (src->agg.items
2475 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2476 {
2477 struct ipa_agg_jf_item *item;
2478 int j;
2479
2480 /* Currently we do not produce clobber aggregate jump functions,
2481 replace with merging when we do. */
2482 gcc_assert (!dst->agg.items);
2483
2484 dst->agg.items = vec_safe_copy (src->agg.items);
2485 dst->agg.by_ref = src->agg.by_ref;
2486 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2487 item->offset -= dst->value.ancestor.offset;
2488 }
2489
2490 if (src->type == IPA_JF_PASS_THROUGH
2491 && src->value.pass_through.operation == NOP_EXPR)
2492 {
2493 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2494 dst->value.ancestor.agg_preserved &=
2495 src->value.pass_through.agg_preserved;
2496 }
2497 else if (src->type == IPA_JF_ANCESTOR)
2498 {
2499 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2500 dst->value.ancestor.offset += src->value.ancestor.offset;
2501 dst->value.ancestor.agg_preserved &=
2502 src->value.ancestor.agg_preserved;
2503 }
2504 else
2505 ipa_set_jf_unknown (dst);
2506 }
2507 else if (dst->type == IPA_JF_PASS_THROUGH)
2508 {
2509 struct ipa_jump_func *src;
2510 /* We must check range due to calls with variable number of arguments
2511 and we cannot combine jump functions with operations. */
2512 if (dst->value.pass_through.operation == NOP_EXPR
2513 && (dst->value.pass_through.formal_id
2514 < ipa_get_cs_argument_count (top)))
2515 {
2516 int dst_fid = dst->value.pass_through.formal_id;
2517 src = ipa_get_ith_jump_func (top, dst_fid);
2518 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2519 struct ipa_polymorphic_call_context *src_ctx
2520 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2521
2522 if (src_ctx && !src_ctx->useless_p ())
2523 {
2524 struct ipa_polymorphic_call_context ctx = *src_ctx;
2525
2526 /* TODO: Make type preserved safe WRT contexts. */
2527 if (!ipa_get_jf_pass_through_type_preserved (dst))
2528 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2529 if (!ctx.useless_p ())
2530 {
2531 if (!dst_ctx)
2532 {
2533 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2534 count);
2535 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2536 }
2537 dst_ctx->combine_with (ctx);
2538 }
2539 }
2540 switch (src->type)
2541 {
2542 case IPA_JF_UNKNOWN:
2543 ipa_set_jf_unknown (dst);
2544 break;
2545 case IPA_JF_CONST:
2546 ipa_set_jf_cst_copy (dst, src);
2547 break;
2548
2549 case IPA_JF_PASS_THROUGH:
2550 {
2551 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2552 enum tree_code operation;
2553 operation = ipa_get_jf_pass_through_operation (src);
2554
2555 if (operation == NOP_EXPR)
2556 {
2557 bool agg_p;
2558 agg_p = dst_agg_p
2559 && ipa_get_jf_pass_through_agg_preserved (src);
2560 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2561 }
2562 else
2563 {
2564 tree operand = ipa_get_jf_pass_through_operand (src);
2565 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2566 operation);
2567 }
2568 break;
2569 }
2570 case IPA_JF_ANCESTOR:
2571 {
2572 bool agg_p;
2573 agg_p = dst_agg_p
2574 && ipa_get_jf_ancestor_agg_preserved (src);
2575 ipa_set_ancestor_jf (dst,
2576 ipa_get_jf_ancestor_offset (src),
2577 ipa_get_jf_ancestor_formal_id (src),
2578 agg_p);
2579 break;
2580 }
2581 default:
2582 gcc_unreachable ();
2583 }
2584
2585 if (src->agg.items
2586 && (dst_agg_p || !src->agg.by_ref))
2587 {
2588 /* Currently we do not produce clobber aggregate jump
2589 functions, replace with merging when we do. */
2590 gcc_assert (!dst->agg.items);
2591
2592 dst->agg.by_ref = src->agg.by_ref;
2593 dst->agg.items = vec_safe_copy (src->agg.items);
2594 }
2595 }
2596 else
2597 ipa_set_jf_unknown (dst);
2598 }
2599 }
2600 }
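/* An informal example of the composition above: if edge CS passes the
   caller's parameter 3 unchanged to the inlined callee, and the inlined
   callee in turn passes that value unchanged on edge E, then after inlining
   the jump function of E becomes a simple pass-through of the caller's
   parameter 3 (the IPA_JF_PASS_THROUGH/NOP_EXPR case above).  */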
2601
2602 /* If TARGET is an ADDR_EXPR of a function declaration, make it the
2603 (possibly SPECULATIVE) destination of an indirect edge IE and return the
2604 edge. Otherwise, return NULL. */
2605
2606 struct cgraph_edge *
2607 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2608 bool speculative)
2609 {
2610 struct cgraph_node *callee;
2611 struct inline_edge_summary *es = inline_edge_summary (ie);
2612 bool unreachable = false;
2613
2614 if (TREE_CODE (target) == ADDR_EXPR)
2615 target = TREE_OPERAND (target, 0);
2616 if (TREE_CODE (target) != FUNCTION_DECL)
2617 {
2618 target = canonicalize_constructor_val (target, NULL);
2619 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2620 {
2621 if (ie->indirect_info->member_ptr)
2622 /* Member pointer call that goes through a VMT lookup. */
2623 return NULL;
2624
2625 if (dump_enabled_p ())
2626 {
2627 location_t loc = gimple_location_safe (ie->call_stmt);
2628 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2629 "discovered direct call to non-function in %s/%i, "
2630 "making it __builtin_unreachable\n",
2631 ie->caller->name (), ie->caller->order);
2632 }
2633
2634 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2635 callee = cgraph_node::get_create (target);
2636 unreachable = true;
2637 }
2638 else
2639 callee = cgraph_node::get (target);
2640 }
2641 else
2642 callee = cgraph_node::get (target);
2643
2644 /* Because may-edges are not explicitly represented and the vtable may be
2645 external, we may create the first reference to the object in the unit. */
2646 if (!callee || callee->global.inlined_to)
2647 {
2648
2649 /* We had better make sure we can refer to it.
2650 In the case of static functions we are out of luck, since we already
2651 removed its body. In the case of public functions we may or may
2652 not introduce the reference. */
2653 if (!canonicalize_constructor_val (target, NULL)
2654 || !TREE_PUBLIC (target))
2655 {
2656 if (dump_file)
2657 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2658 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2659 xstrdup (ie->caller->name ()),
2660 ie->caller->order,
2661 xstrdup (ie->callee->name ()),
2662 ie->callee->order);
2663 return NULL;
2664 }
2665 callee = cgraph_node::get_create (target);
2666 }
2667
2668 /* If the edge is already speculated, check that the new target agrees. */
2669 if (speculative && ie->speculative)
2670 {
2671 struct cgraph_edge *e2;
2672 struct ipa_ref *ref;
2673 ie->speculative_call_info (e2, ie, ref);
2674 if (e2->callee->ultimate_alias_target ()
2675 != callee->ultimate_alias_target ())
2676 {
2677 if (dump_file)
2678 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2679 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2680 xstrdup (ie->caller->name ()),
2681 ie->caller->order,
2682 xstrdup (callee->name ()),
2683 callee->order,
2684 xstrdup (e2->callee->name ()),
2685 e2->callee->order);
2686 }
2687 else
2688 {
2689 if (dump_file)
2690 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2691 "(%s/%i -> %s/%i); this agrees with previous speculation.\n",
2692 xstrdup (ie->caller->name ()),
2693 ie->caller->order,
2694 xstrdup (callee->name ()),
2695 callee->order);
2696 }
2697 return NULL;
2698 }
2699
2700 if (!dbg_cnt (devirt))
2701 return NULL;
2702
2703 ipa_check_create_node_params ();
2704
2705 /* We cannot make edges to inline clones. It is a bug if someone removed
2706 the cgraph node too early. */
2707 gcc_assert (!callee->global.inlined_to);
2708
2709 if (dump_file && !unreachable)
2710 {
2711 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2712 "(%s/%i -> %s/%i), for stmt ",
2713 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2714 speculative ? "speculative" : "known",
2715 xstrdup (ie->caller->name ()),
2716 ie->caller->order,
2717 xstrdup (callee->name ()),
2718 callee->order);
2719 if (ie->call_stmt)
2720 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2721 else
2722 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2723 }
2724 if (dump_enabled_p ())
2725 {
2726 location_t loc = gimple_location_safe (ie->call_stmt);
2727
2728 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2729 "converting indirect call in %s to direct call to %s\n",
2730 ie->caller->name (), callee->name ());
2731 }
2732 if (!speculative)
2733 ie = ie->make_direct (callee);
2734 else
2735 {
2736 if (!callee->can_be_discarded_p ())
2737 {
2738 cgraph_node *alias;
2739 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2740 if (alias)
2741 callee = alias;
2742 }
2743 ie = ie->make_speculative
2744 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2745 }
2746 es = inline_edge_summary (ie);
2747 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2748 - eni_size_weights.call_cost);
2749 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2750 - eni_time_weights.call_cost);
2751
2752 return ie;
2753 }
2754
2755 /* Retrieve a value from the aggregate jump function AGG for the given OFFSET
2756 or return NULL if there is none. BY_REF specifies whether the value has to
2757 be passed by reference or by value. */
2758
2759 tree
2760 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2761 HOST_WIDE_INT offset, bool by_ref)
2762 {
2763 struct ipa_agg_jf_item *item;
2764 int i;
2765
2766 if (by_ref != agg->by_ref)
2767 return NULL;
2768
2769 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2770 if (item->offset == offset)
2771 {
2772 /* Currently we do not have clobber values, return NULL for them once
2773 we do. */
2774 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2775 return item->value;
2776 }
2777 return NULL;
2778 }
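/* A hypothetical use, mirroring the lookups done by the devirtualization
   code below (item offsets are expressed in bits):

     tree cst = ipa_find_agg_cst_for_param (&jfunc->agg, 64, true);

   returns the constant known to be stored at bit offset 64 of an aggregate
   passed by reference, or NULL when no matching item exists.  */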
2779
2780 /* Remove a reference to SYMBOL from the list of references of a node given by
2781 reference description RDESC. Return true if the reference has been
2782 successfully found and removed. */
2783
2784 static bool
2785 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2786 {
2787 struct ipa_ref *to_del;
2788 struct cgraph_edge *origin;
2789
2790 origin = rdesc->cs;
2791 if (!origin)
2792 return false;
2793 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2794 origin->lto_stmt_uid);
2795 if (!to_del)
2796 return false;
2797
2798 to_del->remove_reference ();
2799 if (dump_file)
2800 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2801 xstrdup (origin->caller->name ()),
2802 origin->caller->order, xstrdup (symbol->name ()));
2803 return true;
2804 }
2805
2806 /* If JFUNC has a reference description with refcount different from
2807 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2808 NULL. JFUNC must be a constant jump function. */
2809
2810 static struct ipa_cst_ref_desc *
2811 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2812 {
2813 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2814 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2815 return rdesc;
2816 else
2817 return NULL;
2818 }
2819
2820 /* If the value of constant jump function JFUNC is an address of a function
2821 declaration, return the associated call graph node. Otherwise return
2822 NULL. */
2823
2824 static cgraph_node *
2825 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2826 {
2827 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2828 tree cst = ipa_get_jf_constant (jfunc);
2829 if (TREE_CODE (cst) != ADDR_EXPR
2830 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2831 return NULL;
2832
2833 return cgraph_node::get (TREE_OPERAND (cst, 0));
2834 }
2835
2836
2837 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2838 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2839 the edge specified in the rdesc. Return false if either the symbol or the
2840 reference could not be found, otherwise return true. */
2841
2842 static bool
2843 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2844 {
2845 struct ipa_cst_ref_desc *rdesc;
2846 if (jfunc->type == IPA_JF_CONST
2847 && (rdesc = jfunc_rdesc_usable (jfunc))
2848 && --rdesc->refcount == 0)
2849 {
2850 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2851 if (!symbol)
2852 return false;
2853
2854 return remove_described_reference (symbol, rdesc);
2855 }
2856 return true;
2857 }
2858
2859 /* Try to find a destination for indirect edge IE that corresponds to a simple
2860 call or a call of a member function pointer and where the destination is a
2861 pointer formal parameter described by jump function JFUNC. If it can be
2862 determined, return the newly direct edge, otherwise return NULL.
2863 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2864
2865 static struct cgraph_edge *
2866 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2867 struct ipa_jump_func *jfunc,
2868 struct ipa_node_params *new_root_info)
2869 {
2870 struct cgraph_edge *cs;
2871 tree target;
2872 bool agg_contents = ie->indirect_info->agg_contents;
2873
2874 if (ie->indirect_info->agg_contents)
2875 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2876 ie->indirect_info->offset,
2877 ie->indirect_info->by_ref);
2878 else
2879 target = ipa_value_from_jfunc (new_root_info, jfunc);
2880 if (!target)
2881 return NULL;
2882 cs = ipa_make_edge_direct_to_target (ie, target);
2883
2884 if (cs && !agg_contents)
2885 {
2886 bool ok;
2887 gcc_checking_assert (cs->callee
2888 && (cs != ie
2889 || jfunc->type != IPA_JF_CONST
2890 || !cgraph_node_for_jfunc (jfunc)
2891 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2892 ok = try_decrement_rdesc_refcount (jfunc);
2893 gcc_checking_assert (ok);
2894 }
2895
2896 return cs;
2897 }
2898
2899 /* Return the target to be used in cases of impossible devirtualization. IE
2900 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
2901
2902 tree
2903 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2904 {
2905 if (dump_file)
2906 {
2907 if (target)
2908 fprintf (dump_file,
2909 "Type inconsistent devirtualization: %s/%i->%s\n",
2910 ie->caller->name (), ie->caller->order,
2911 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2912 else
2913 fprintf (dump_file,
2914 "No devirtualization target in %s/%i\n",
2915 ie->caller->name (), ie->caller->order);
2916 }
2917 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2918 cgraph_node::get_create (new_target);
2919 return new_target;
2920 }
2921
2922 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2923 call based on a formal parameter which is described by jump function JFUNC
2924 and if it can be determined, make it direct and return the direct edge.
2925 Otherwise, return NULL. CTX describes the polymorphic context brought
2926 along by the parameter the call is based on. */
2927
2928 static struct cgraph_edge *
2929 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2930 struct ipa_jump_func *jfunc,
2931 struct ipa_polymorphic_call_context ctx)
2932 {
2933 tree target = NULL;
2934 bool speculative = false;
2935
2936 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2937 return NULL;
2938
2939 gcc_assert (!ie->indirect_info->by_ref);
2940
2941 /* Try to do lookup via known virtual table pointer value. */
2942 if (!ie->indirect_info->vptr_changed
2943 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2944 {
2945 tree vtable;
2946 unsigned HOST_WIDE_INT offset;
2947 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2948 ie->indirect_info->offset,
2949 true);
2950 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2951 {
2952 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2953 vtable, offset);
2954 if (t)
2955 {
2956 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2957 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2958 || !possible_polymorphic_call_target_p
2959 (ie, cgraph_node::get (t)))
2960 {
2961 /* Do not speculate builtin_unreachable, it is stupid! */
2962 if (!ie->indirect_info->vptr_changed)
2963 target = ipa_impossible_devirt_target (ie, target);
2964 }
2965 else
2966 {
2967 target = t;
2968 speculative = ie->indirect_info->vptr_changed;
2969 }
2970 }
2971 }
2972 }
2973
2974 ipa_polymorphic_call_context ie_context (ie);
2975 vec <cgraph_node *>targets;
2976 bool final;
2977
2978 ctx.offset_by (ie->indirect_info->offset);
2979 if (ie->indirect_info->vptr_changed)
2980 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2981 ie->indirect_info->otr_type);
2982 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2983 targets = possible_polymorphic_call_targets
2984 (ie->indirect_info->otr_type,
2985 ie->indirect_info->otr_token,
2986 ctx, &final);
2987 if (final && targets.length () <= 1)
2988 {
2989 if (targets.length () == 1)
2990 target = targets[0]->decl;
2991 else
2992 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2993 }
2994 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2995 && !ie->speculative && ie->maybe_hot_p ())
2996 {
2997 cgraph_node *n;
2998 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2999 ie->indirect_info->otr_token,
3000 ie->indirect_info->context);
3001 if (n)
3002 {
3003 target = n->decl;
3004 speculative = true;
3005 }
3006 }
3007
3008 if (target)
3009 {
3010 if (!possible_polymorphic_call_target_p
3011 (ie, cgraph_node::get_create (target)))
3012 {
3013 if (speculative)
3014 return NULL;
3015 target = ipa_impossible_devirt_target (ie, target);
3016 }
3017 return ipa_make_edge_direct_to_target (ie, target, speculative);
3018 }
3019 else
3020 return NULL;
3021 }
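/* Informally: when the analysis above narrows the call down to exactly one
   possible target, the edge is made direct to it; when it proves there is no
   possible target at all, the call is redirected to __builtin_unreachable
   via ipa_impossible_devirt_target.  */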
3022
3023 /* Update the param called notes associated with NODE when CS is being inlined,
3024 assuming NODE is (potentially indirectly) inlined into CS->callee.
3025 Moreover, if the callee is discovered to be constant, create a new cgraph
3026 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3027 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3028
3029 static bool
3030 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3031 struct cgraph_node *node,
3032 vec<cgraph_edge *> *new_edges)
3033 {
3034 struct ipa_edge_args *top;
3035 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3036 struct ipa_node_params *new_root_info;
3037 bool res = false;
3038
3039 ipa_check_create_edge_args ();
3040 top = IPA_EDGE_REF (cs);
3041 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3042 ? cs->caller->global.inlined_to
3043 : cs->caller);
3044
3045 for (ie = node->indirect_calls; ie; ie = next_ie)
3046 {
3047 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3048 struct ipa_jump_func *jfunc;
3049 int param_index;
3050
3051 next_ie = ie->next_callee;
3052
3053 if (ici->param_index == -1)
3054 continue;
3055
3056 /* We must check range due to calls with variable number of arguments: */
3057 if (ici->param_index >= ipa_get_cs_argument_count (top))
3058 {
3059 ici->param_index = -1;
3060 continue;
3061 }
3062
3063 param_index = ici->param_index;
3064 jfunc = ipa_get_ith_jump_func (top, param_index);
3065
3066 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3067 new_direct_edge = NULL;
3068 else if (ici->polymorphic)
3069 {
3070 ipa_polymorphic_call_context ctx;
3071 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3072 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3073 }
3074 else
3075 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3076 new_root_info);
3077 /* If speculation was removed, then there is nothing to do. */
3078 if (new_direct_edge && new_direct_edge != ie)
3079 {
3080 new_direct_edge->indirect_inlining_edge = 1;
3081 top = IPA_EDGE_REF (cs);
3082 res = true;
3083 }
3084 else if (new_direct_edge)
3085 {
3086 new_direct_edge->indirect_inlining_edge = 1;
3087 if (new_direct_edge->call_stmt)
3088 new_direct_edge->call_stmt_cannot_inline_p
3089 = !gimple_check_call_matching_types (
3090 new_direct_edge->call_stmt,
3091 new_direct_edge->callee->decl, false);
3092 if (new_edges)
3093 {
3094 new_edges->safe_push (new_direct_edge);
3095 res = true;
3096 }
3097 top = IPA_EDGE_REF (cs);
3098 }
3099 else if (jfunc->type == IPA_JF_PASS_THROUGH
3100 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3101 {
3102 if ((ici->agg_contents
3103 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3104 || (ici->polymorphic
3105 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3106 ici->param_index = -1;
3107 else
3108 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3109 }
3110 else if (jfunc->type == IPA_JF_ANCESTOR)
3111 {
3112 if ((ici->agg_contents
3113 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3114 || (ici->polymorphic
3115 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3116 ici->param_index = -1;
3117 else
3118 {
3119 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3120 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3121 }
3122 }
3123 else
3124 /* Either we can find a destination for this edge now or never. */
3125 ici->param_index = -1;
3126 }
3127
3128 return res;
3129 }
3130
3131 /* Recursively traverse subtree of NODE (including node) made of inlined
3132 cgraph_edges when CS has been inlined and invoke
3133 update_indirect_edges_after_inlining on all nodes and
3134 update_jump_functions_after_inlining on all non-inlined edges that lead out
3135 of this subtree. Newly discovered indirect edges will be added to
3136 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3137 created. */
3138
3139 static bool
3140 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3141 struct cgraph_node *node,
3142 vec<cgraph_edge *> *new_edges)
3143 {
3144 struct cgraph_edge *e;
3145 bool res;
3146
3147 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3148
3149 for (e = node->callees; e; e = e->next_callee)
3150 if (!e->inline_failed)
3151 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3152 else
3153 update_jump_functions_after_inlining (cs, e);
3154 for (e = node->indirect_calls; e; e = e->next_callee)
3155 update_jump_functions_after_inlining (cs, e);
3156
3157 return res;
3158 }
3159
3160 /* Combine two controlled uses counts as done during inlining. */
3161
3162 static int
3163 combine_controlled_uses_counters (int c, int d)
3164 {
3165 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3166 return IPA_UNDESCRIBED_USE;
3167 else
3168 return c + d - 1;
3169 }
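/* For example, combining C == 2 with D == 3 yields 2 + 3 - 1 == 4: the one
   use that was the argument of the now-inlined call disappears, while the
   remaining uses on both sides add up.  */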
3170
3171 /* Propagate the number of controlled uses from CS->callee to the new root of
3172 the tree of inlined nodes. */
3173
3174 static void
3175 propagate_controlled_uses (struct cgraph_edge *cs)
3176 {
3177 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3178 struct cgraph_node *new_root = cs->caller->global.inlined_to
3179 ? cs->caller->global.inlined_to : cs->caller;
3180 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3181 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3182 int count, i;
3183
3184 count = MIN (ipa_get_cs_argument_count (args),
3185 ipa_get_param_count (old_root_info));
3186 for (i = 0; i < count; i++)
3187 {
3188 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3189 struct ipa_cst_ref_desc *rdesc;
3190
3191 if (jf->type == IPA_JF_PASS_THROUGH)
3192 {
3193 int src_idx, c, d;
3194 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3195 c = ipa_get_controlled_uses (new_root_info, src_idx);
3196 d = ipa_get_controlled_uses (old_root_info, i);
3197
3198 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3199 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3200 c = combine_controlled_uses_counters (c, d);
3201 ipa_set_controlled_uses (new_root_info, src_idx, c);
3202 if (c == 0 && new_root_info->ipcp_orig_node)
3203 {
3204 struct cgraph_node *n;
3205 struct ipa_ref *ref;
3206 tree t = new_root_info->known_csts[src_idx];
3207
3208 if (t && TREE_CODE (t) == ADDR_EXPR
3209 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3210 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3211 && (ref = new_root->find_reference (n, NULL, 0)))
3212 {
3213 if (dump_file)
3214 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3215 "reference from %s/%i to %s/%i.\n",
3216 xstrdup (new_root->name ()),
3217 new_root->order,
3218 xstrdup (n->name ()), n->order);
3219 ref->remove_reference ();
3220 }
3221 }
3222 }
3223 else if (jf->type == IPA_JF_CONST
3224 && (rdesc = jfunc_rdesc_usable (jf)))
3225 {
3226 int d = ipa_get_controlled_uses (old_root_info, i);
3227 int c = rdesc->refcount;
3228 rdesc->refcount = combine_controlled_uses_counters (c, d);
3229 if (rdesc->refcount == 0)
3230 {
3231 tree cst = ipa_get_jf_constant (jf);
3232 struct cgraph_node *n;
3233 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3234 && TREE_CODE (TREE_OPERAND (cst, 0))
3235 == FUNCTION_DECL);
3236 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3237 if (n)
3238 {
3239 struct cgraph_node *clone;
3240 bool ok;
3241 ok = remove_described_reference (n, rdesc);
3242 gcc_checking_assert (ok);
3243
3244 clone = cs->caller;
3245 while (clone->global.inlined_to
3246 && clone != rdesc->cs->caller
3247 && IPA_NODE_REF (clone)->ipcp_orig_node)
3248 {
3249 struct ipa_ref *ref;
3250 ref = clone->find_reference (n, NULL, 0);
3251 if (ref)
3252 {
3253 if (dump_file)
3254 fprintf (dump_file, "ipa-prop: Removing "
3255 "cloning-created reference "
3256 "from %s/%i to %s/%i.\n",
3257 xstrdup (clone->name ()),
3258 clone->order,
3259 xstrdup (n->name ()),
3260 n->order);
3261 ref->remove_reference ();
3262 }
3263 clone = clone->callers->caller;
3264 }
3265 }
3266 }
3267 }
3268 }
3269
3270 for (i = ipa_get_param_count (old_root_info);
3271 i < ipa_get_cs_argument_count (args);
3272 i++)
3273 {
3274 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3275
3276 if (jf->type == IPA_JF_CONST)
3277 {
3278 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3279 if (rdesc)
3280 rdesc->refcount = IPA_UNDESCRIBED_USE;
3281 }
3282 else if (jf->type == IPA_JF_PASS_THROUGH)
3283 ipa_set_controlled_uses (new_root_info,
3284 jf->value.pass_through.formal_id,
3285 IPA_UNDESCRIBED_USE);
3286 }
3287 }
3288
3289 /* Update jump functions and call note functions on inlining the call site CS.
3290 CS is expected to lead to a node already cloned by
3291 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3292 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3293 created. */
3294
3295 bool
3296 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3297 vec<cgraph_edge *> *new_edges)
3298 {
3299 bool changed;
3300 /* Do nothing if the preparation phase has not been carried out yet
3301 (i.e. during early inlining). */
3302 if (!ipa_node_params_vector.exists ())
3303 return false;
3304 gcc_assert (ipa_edge_args_vector);
3305
3306 propagate_controlled_uses (cs);
3307 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3308
3309 return changed;
3310 }
3311
3312 /* Frees all dynamically allocated structures that the argument info points
3313 to. */
3314
3315 void
3316 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3317 {
3318 vec_free (args->jump_functions);
3319 memset (args, 0, sizeof (*args));
3320 }
3321
3322 /* Free all ipa_edge structures. */
3323
3324 void
3325 ipa_free_all_edge_args (void)
3326 {
3327 int i;
3328 struct ipa_edge_args *args;
3329
3330 if (!ipa_edge_args_vector)
3331 return;
3332
3333 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3334 ipa_free_edge_args_substructures (args);
3335
3336 vec_free (ipa_edge_args_vector);
3337 }
3338
3339 /* Frees all dynamically allocated structures that the param info points
3340 to. */
3341
3342 void
3343 ipa_free_node_params_substructures (struct ipa_node_params *info)
3344 {
3345 info->descriptors.release ();
3346 free (info->lattices);
3347 /* Lattice values and their sources are deallocated with their allocation
3348 pool. */
3349 info->known_csts.release ();
3350 info->known_contexts.release ();
3351 memset (info, 0, sizeof (*info));
3352 }
3353
3354 /* Free all ipa_node_params structures. */
3355
3356 void
3357 ipa_free_all_node_params (void)
3358 {
3359 int i;
3360 struct ipa_node_params *info;
3361
3362 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3363 ipa_free_node_params_substructures (info);
3364
3365 ipa_node_params_vector.release ();
3366 }
3367
3368 /* Grow ipcp_transformations if necessary. */
3369
3370 void
3371 ipcp_grow_transformations_if_necessary (void)
3372 {
3373 if (vec_safe_length (ipcp_transformations)
3374 <= (unsigned) symtab->cgraph_max_uid)
3375 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3376 }
3377
3378 /* Set the aggregate replacements of NODE to be AGGVALS. */
3379
3380 void
3381 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3382 struct ipa_agg_replacement_value *aggvals)
3383 {
3384 ipcp_grow_transformations_if_necessary ();
3385 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3386 }
3387
3388 /* Hook that is called by cgraph.c when an edge is removed. */
3389
3390 static void
3391 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3392 {
3393 struct ipa_edge_args *args;
3394
3395 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3396 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3397 return;
3398
3399 args = IPA_EDGE_REF (cs);
3400 if (args->jump_functions)
3401 {
3402 struct ipa_jump_func *jf;
3403 int i;
3404 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3405 {
3406 struct ipa_cst_ref_desc *rdesc;
3407 try_decrement_rdesc_refcount (jf);
3408 if (jf->type == IPA_JF_CONST
3409 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3410 && rdesc->cs == cs)
3411 rdesc->cs = NULL;
3412 }
3413 }
3414
3415 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3416 }
3417
3418 /* Hook that is called by cgraph.c when a node is removed. */
3419
3420 static void
3421 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3422 {
3423 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3424 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3425 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3426 if (vec_safe_length (ipcp_transformations) > (unsigned)node->uid)
3427 {
3428 (*ipcp_transformations)[(unsigned)node->uid].agg_values = NULL;
3429 (*ipcp_transformations)[(unsigned)node->uid].alignments = NULL;
3430 }
3431 }
3432
3433 /* Hook that is called by cgraph.c when an edge is duplicated. */
3434
3435 static void
3436 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3437 __attribute__((unused)) void *data)
3438 {
3439 struct ipa_edge_args *old_args, *new_args;
3440 unsigned int i;
3441
3442 ipa_check_create_edge_args ();
3443
3444 old_args = IPA_EDGE_REF (src);
3445 new_args = IPA_EDGE_REF (dst);
3446
3447 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3448 if (old_args->polymorphic_call_contexts)
3449 new_args->polymorphic_call_contexts
3450 = vec_safe_copy (old_args->polymorphic_call_contexts);
3451
3452 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3453 {
3454 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3455 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3456
3457 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3458
3459 if (src_jf->type == IPA_JF_CONST)
3460 {
3461 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3462
3463 if (!src_rdesc)
3464 dst_jf->value.constant.rdesc = NULL;
3465 else if (src->caller == dst->caller)
3466 {
3467 struct ipa_ref *ref;
3468 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3469 gcc_checking_assert (n);
3470 ref = src->caller->find_reference (n, src->call_stmt,
3471 src->lto_stmt_uid);
3472 gcc_checking_assert (ref);
3473 dst->caller->clone_reference (ref, ref->stmt);
3474
3475 gcc_checking_assert (ipa_refdesc_pool);
3476 struct ipa_cst_ref_desc *dst_rdesc
3477 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3478 dst_rdesc->cs = dst;
3479 dst_rdesc->refcount = src_rdesc->refcount;
3480 dst_rdesc->next_duplicate = NULL;
3481 dst_jf->value.constant.rdesc = dst_rdesc;
3482 }
3483 else if (src_rdesc->cs == src)
3484 {
3485 struct ipa_cst_ref_desc *dst_rdesc;
3486 gcc_checking_assert (ipa_refdesc_pool);
3487 dst_rdesc
3488 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3489 dst_rdesc->cs = dst;
3490 dst_rdesc->refcount = src_rdesc->refcount;
3491 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3492 src_rdesc->next_duplicate = dst_rdesc;
3493 dst_jf->value.constant.rdesc = dst_rdesc;
3494 }
3495 else
3496 {
3497 struct ipa_cst_ref_desc *dst_rdesc;
3498 /* This can happen during inlining, when a JFUNC can refer to a
3499 reference taken in a function up in the tree of inline clones.
3500 We need to find the duplicate that refers to our tree of
3501 inline clones. */
3502
3503 gcc_assert (dst->caller->global.inlined_to);
3504 for (dst_rdesc = src_rdesc->next_duplicate;
3505 dst_rdesc;
3506 dst_rdesc = dst_rdesc->next_duplicate)
3507 {
3508 struct cgraph_node *top;
3509 top = dst_rdesc->cs->caller->global.inlined_to
3510 ? dst_rdesc->cs->caller->global.inlined_to
3511 : dst_rdesc->cs->caller;
3512 if (dst->caller->global.inlined_to == top)
3513 break;
3514 }
3515 gcc_assert (dst_rdesc);
3516 dst_jf->value.constant.rdesc = dst_rdesc;
3517 }
3518 }
3519 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3520 && src->caller == dst->caller)
3521 {
3522 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3523 ? dst->caller->global.inlined_to : dst->caller;
3524 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3525 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3526
3527 int c = ipa_get_controlled_uses (root_info, idx);
3528 if (c != IPA_UNDESCRIBED_USE)
3529 {
3530 c++;
3531 ipa_set_controlled_uses (root_info, idx, c);
3532 }
3533 }
3534 }
3535 }
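/* Informally: the NEXT_DUPLICATE chain maintained above is what the lookup
   in the last branch walks; each duplicate of a reference description
   belongs to one tree of inline clones, identified by the inlined-to root of
   its edge's caller.  */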
3536
3537 /* Hook that is called by cgraph.c when a node is duplicated. */
3538
3539 static void
3540 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3541 ATTRIBUTE_UNUSED void *data)
3542 {
3543 struct ipa_node_params *old_info, *new_info;
3544 struct ipa_agg_replacement_value *old_av, *new_av;
3545
3546 ipa_check_create_node_params ();
3547 old_info = IPA_NODE_REF (src);
3548 new_info = IPA_NODE_REF (dst);
3549
3550 new_info->descriptors = old_info->descriptors.copy ();
3551 new_info->lattices = NULL;
3552 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3553
3554 new_info->analysis_done = old_info->analysis_done;
3555 new_info->node_enqueued = old_info->node_enqueued;
3556
3557 old_av = ipa_get_agg_replacements_for_node (src);
3558 if (old_av)
3559 {
3560 new_av = NULL;
3561 while (old_av)
3562 {
3563 struct ipa_agg_replacement_value *v;
3564
3565 v = ggc_alloc<ipa_agg_replacement_value> ();
3566 memcpy (v, old_av, sizeof (*v));
3567 v->next = new_av;
3568 new_av = v;
3569 old_av = old_av->next;
3570 }
3571 ipa_set_node_agg_value_chain (dst, new_av);
3572 }
3573
3574 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3575
3576 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3577 {
3578 ipcp_grow_transformations_if_necessary ();
3579 src_trans = ipcp_get_transformation_summary (src);
3580 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3581 vec<ipa_alignment, va_gc> *&dst_alignments
3582 = ipcp_get_transformation_summary (dst)->alignments;
3583 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3584 for (unsigned i = 0; i < src_alignments->length (); ++i)
3585 dst_alignments->quick_push ((*src_alignments)[i]);
3586 }
3587 }
3588
3589
3590 /* Analyze newly added function into callgraph. */
3591
3592 static void
3593 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3594 {
3595 if (node->has_gimple_body_p ())
3596 ipa_analyze_node (node);
3597 }
3598
3599 /* Register our cgraph hooks if they are not already there. */
3600
3601 void
3602 ipa_register_cgraph_hooks (void)
3603 {
3604 if (!edge_removal_hook_holder)
3605 edge_removal_hook_holder =
3606 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3607 if (!node_removal_hook_holder)
3608 node_removal_hook_holder =
3609 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3610 if (!edge_duplication_hook_holder)
3611 edge_duplication_hook_holder =
3612 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3613 if (!node_duplication_hook_holder)
3614 node_duplication_hook_holder =
3615 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3616 if (!function_insertion_hook_holder)
3617 function_insertion_hook_holder =
3618 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3619 }
3620
3621 /* Unregister our cgraph hooks and reset all hook holders. */
3622
3623 static void
3624 ipa_unregister_cgraph_hooks (void)
3625 {
3626 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3627 edge_removal_hook_holder = NULL;
3628 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3629 node_removal_hook_holder = NULL;
3630 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3631 edge_duplication_hook_holder = NULL;
3632 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3633 node_duplication_hook_holder = NULL;
3634 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3635 function_insertion_hook_holder = NULL;
3636 }
3637
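/* Illustrative sketch, not part of GCC proper: how an in-process client such
   as libgccjit (cf. PR jit/63854) would pair the registration above with the
   teardown so that repeated compilations do not leak hook holders.  The
   function name is hypothetical.  */

static void ATTRIBUTE_UNUSED
ipa_example_hook_lifecycle (void)
{
  /* Idempotent: each holder is created only while it is still NULL.  */
  ipa_register_cgraph_hooks ();

  /* ... IPA analyses and transformations would run here ...  */

  /* Detaches all five holders and resets them to NULL, so a later
     ipa_register_cgraph_hooks starts from a clean state.  */
  ipa_unregister_cgraph_hooks ();
}
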
3638 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3639 longer needed after ipa-cp. */
3640
3641 void
3642 ipa_free_all_structures_after_ipa_cp (void)
3643 {
3644 if (!optimize && !in_lto_p)
3645 {
3646 ipa_free_all_edge_args ();
3647 ipa_free_all_node_params ();
3648 free_alloc_pool (ipcp_sources_pool);
3649 free_alloc_pool (ipcp_cst_values_pool);
3650 free_alloc_pool (ipcp_poly_ctx_values_pool);
3651 free_alloc_pool (ipcp_agg_lattice_pool);
3652 ipa_unregister_cgraph_hooks ();
3653 if (ipa_refdesc_pool)
3654 free_alloc_pool (ipa_refdesc_pool);
3655 }
3656 }
3657
3658 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3659 longer needed after indirect inlining. */
3660
3661 void
3662 ipa_free_all_structures_after_iinln (void)
3663 {
3664 ipa_free_all_edge_args ();
3665 ipa_free_all_node_params ();
3666 ipa_unregister_cgraph_hooks ();
3667 if (ipcp_sources_pool)
3668 free_alloc_pool (ipcp_sources_pool);
3669 if (ipcp_cst_values_pool)
3670 free_alloc_pool (ipcp_cst_values_pool);
3671 if (ipcp_poly_ctx_values_pool)
3672 free_alloc_pool (ipcp_poly_ctx_values_pool);
3673 if (ipcp_agg_lattice_pool)
3674 free_alloc_pool (ipcp_agg_lattice_pool);
3675 if (ipa_refdesc_pool)
3676 free_alloc_pool (ipa_refdesc_pool);
3677 }
3678
3679 /* Print ipa_tree_map data structures of function NODE to F. */
3681
3682 void
3683 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3684 {
3685 int i, count;
3686 struct ipa_node_params *info;
3687
3688 if (!node->definition)
3689 return;
3690 info = IPA_NODE_REF (node);
3691 fprintf (f, " function %s/%i parameter descriptors:\n",
3692 node->name (), node->order);
3693 count = ipa_get_param_count (info);
3694 for (i = 0; i < count; i++)
3695 {
3696 int c;
3697
3698 fprintf (f, " ");
3699 ipa_dump_param (f, info, i);
3700 if (ipa_is_param_used (info, i))
3701 fprintf (f, " used");
3702 c = ipa_get_controlled_uses (info, i);
3703 if (c == IPA_UNDESCRIBED_USE)
3704 fprintf (f, " undescribed_use");
3705 else
3706 fprintf (f, " controlled_uses=%i", c);
3707 fprintf (f, "\n");
3708 }
3709 }
3710
3711 /* Print ipa_tree_map data structures of all functions in the
3712 callgraph to F. */
3713
3714 void
3715 ipa_print_all_params (FILE * f)
3716 {
3717 struct cgraph_node *node;
3718
3719 fprintf (f, "\nFunction parameters:\n");
3720 FOR_EACH_FUNCTION (node)
3721 ipa_print_node_params (f, node);
3722 }
3723
3724 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3725
3726 vec<tree>
3727 ipa_get_vector_of_formal_parms (tree fndecl)
3728 {
3729 vec<tree> args;
3730 int count;
3731 tree parm;
3732
3733 gcc_assert (!flag_wpa);
3734 count = count_formal_params (fndecl);
3735 args.create (count);
3736 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3737 args.quick_push (parm);
3738
3739 return args;
3740 }
3741
3742 /* Return a heap-allocated vector containing the types of the formal
3743 parameters of function type FNTYPE. */
3744
3745 vec<tree>
3746 ipa_get_vector_of_formal_parm_types (tree fntype)
3747 {
3748 vec<tree> types;
3749 int count = 0;
3750 tree t;
3751
3752 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3753 count++;
3754
3755 types.create (count);
3756 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3757 types.quick_push (TREE_VALUE (t));
3758
3759 return types;
3760 }
3761
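/* Illustrative sketch, not part of GCC proper: the two vectors above differ
   in their terminator.  For a prototyped "int f (int, char)" the parameter
   vector has two PARM_DECLs while the type vector has three entries, the
   last being void_type_node, which is why ipa_modify_formal_parameters below
   tolerates otypes being one element longer than oparms.  The function name
   is hypothetical.  */

static void ATTRIBUTE_UNUSED
ipa_example_compare_parm_vectors (tree fndecl)
{
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
  vec<tree> types = ipa_get_vector_of_formal_parm_types (TREE_TYPE (fndecl));

  /* Holds for prototyped, non-variadic functions; stdarg functions lack the
     void_type_node terminator and unprototyped ones have no argument types
     at all.  */
  if (!types.is_empty ()
      && types.last () == void_type_node)
    gcc_assert (types.length () == parms.length () + 1);

  parms.release ();
  types.release ();
}
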
3762 /* Modify the function declaration FNDECL and its type according to the plan
3763 in ADJUSTMENTS. It also sets the base fields of individual adjustment
3764 structures to point to the actual parameters being modified, which are
3765 determined by the base_index field. */
3766
3767 void
3768 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3769 {
3770 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3771 tree orig_type = TREE_TYPE (fndecl);
3772 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3773
3774 /* The following test is an ugly hack; some functions simply don't have any
3775 arguments in their type. This is probably a bug but well... */
3776 bool care_for_types = (old_arg_types != NULL_TREE);
3777 bool last_parm_void;
3778 vec<tree> otypes;
3779 if (care_for_types)
3780 {
3781 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3782 == void_type_node);
3783 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3784 if (last_parm_void)
3785 gcc_assert (oparms.length () + 1 == otypes.length ());
3786 else
3787 gcc_assert (oparms.length () == otypes.length ());
3788 }
3789 else
3790 {
3791 last_parm_void = false;
3792 otypes.create (0);
3793 }
3794
3795 int len = adjustments.length ();
3796 tree *link = &DECL_ARGUMENTS (fndecl);
3797 tree new_arg_types = NULL;
3798 for (int i = 0; i < len; i++)
3799 {
3800 struct ipa_parm_adjustment *adj;
3801 gcc_assert (link);
3802
3803 adj = &adjustments[i];
3804 tree parm;
3805 if (adj->op == IPA_PARM_OP_NEW)
3806 parm = NULL;
3807 else
3808 parm = oparms[adj->base_index];
3809 adj->base = parm;
3810
3811 if (adj->op == IPA_PARM_OP_COPY)
3812 {
3813 if (care_for_types)
3814 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3815 new_arg_types);
3816 *link = parm;
3817 link = &DECL_CHAIN (parm);
3818 }
3819 else if (adj->op != IPA_PARM_OP_REMOVE)
3820 {
3821 tree new_parm;
3822 tree ptype;
3823
3824 if (adj->by_ref)
3825 ptype = build_pointer_type (adj->type);
3826 else
3827 {
3828 ptype = adj->type;
3829 if (is_gimple_reg_type (ptype))
3830 {
3831 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3832 if (TYPE_ALIGN (ptype) < malign)
3833 ptype = build_aligned_type (ptype, malign);
3834 }
3835 }
3836
3837 if (care_for_types)
3838 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3839
3840 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3841 ptype);
3842 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3843 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3844 DECL_ARTIFICIAL (new_parm) = 1;
3845 DECL_ARG_TYPE (new_parm) = ptype;
3846 DECL_CONTEXT (new_parm) = fndecl;
3847 TREE_USED (new_parm) = 1;
3848 DECL_IGNORED_P (new_parm) = 1;
3849 layout_decl (new_parm, 0);
3850
3851 if (adj->op == IPA_PARM_OP_NEW)
3852 adj->base = NULL;
3853 else
3854 adj->base = parm;
3855 adj->new_decl = new_parm;
3856
3857 *link = new_parm;
3858 link = &DECL_CHAIN (new_parm);
3859 }
3860 }
3861
3862 *link = NULL_TREE;
3863
3864 tree new_reversed = NULL;
3865 if (care_for_types)
3866 {
3867 new_reversed = nreverse (new_arg_types);
3868 if (last_parm_void)
3869 {
3870 if (new_reversed)
3871 TREE_CHAIN (new_arg_types) = void_list_node;
3872 else
3873 new_reversed = void_list_node;
3874 }
3875 }
3876
3877 /* Use copy_node to preserve as much as possible from the original type
3878 (debug info, attribute lists etc.). The exception is METHOD_TYPEs,
3879 which must have a THIS argument; when we are asked to remove it, we
3880 need to build a new FUNCTION_TYPE instead. */
3882 tree new_type = NULL;
3883 if (TREE_CODE (orig_type) != METHOD_TYPE
3884 || (adjustments[0].op == IPA_PARM_OP_COPY
3885 && adjustments[0].base_index == 0))
3886 {
3887 new_type = build_distinct_type_copy (orig_type);
3888 TYPE_ARG_TYPES (new_type) = new_reversed;
3889 }
3890 else
3891 {
3892 new_type
3893 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3894 new_reversed));
3895 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3896 DECL_VINDEX (fndecl) = NULL_TREE;
3897 }
3898
3899 /* When signature changes, we need to clear builtin info. */
3900 if (DECL_BUILT_IN (fndecl))
3901 {
3902 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3903 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3904 }
3905
3906 TREE_TYPE (fndecl) = new_type;
3907 DECL_VIRTUAL_P (fndecl) = 0;
3908 DECL_LANG_SPECIFIC (fndecl) = NULL;
3909 otypes.release ();
3910 oparms.release ();
3911 }
3912
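/* Illustrative sketch, not part of GCC proper: how a caller such as
   tree-sra.c would build the plan consumed by the routine above and the ones
   below -- drop the first parameter of FNDECL and copy the rest.  Only the
   fields these two operations actually need are filled in; the function name
   is hypothetical.  */

static ipa_parm_adjustment_vec ATTRIBUTE_UNUSED
ipa_example_drop_first_param (tree fndecl)
{
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
  ipa_parm_adjustment_vec adjustments;
  adjustments.create (parms.length ());

  for (unsigned i = 0; i < parms.length (); i++)
    {
      struct ipa_parm_adjustment adj;
      memset (&adj, 0, sizeof (adj));
      /* base_index names the parameter in the original declaration;
	 IPA_PARM_OP_COPY entries keep the original type, so adj.type can
	 stay unset for them.  */
      adj.base_index = i;
      adj.op = (i == 0) ? IPA_PARM_OP_REMOVE : IPA_PARM_OP_COPY;
      adjustments.quick_push (adj);
    }

  parms.release ();
  return adjustments;
}
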
3913 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3914 If this is a directly recursive call, CS must be NULL. Otherwise it must
3915 contain the corresponding call graph edge. */
3916
3917 void
3918 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3919 ipa_parm_adjustment_vec adjustments)
3920 {
3921 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3922 vec<tree> vargs;
3923 vec<tree, va_gc> **debug_args = NULL;
3924 gcall *new_stmt;
3925 gimple_stmt_iterator gsi, prev_gsi;
3926 tree callee_decl;
3927 int i, len;
3928
3929 len = adjustments.length ();
3930 vargs.create (len);
3931 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3932 current_node->remove_stmt_references (stmt);
3933
3934 gsi = gsi_for_stmt (stmt);
3935 prev_gsi = gsi;
3936 gsi_prev (&prev_gsi);
3937 for (i = 0; i < len; i++)
3938 {
3939 struct ipa_parm_adjustment *adj;
3940
3941 adj = &adjustments[i];
3942
3943 if (adj->op == IPA_PARM_OP_COPY)
3944 {
3945 tree arg = gimple_call_arg (stmt, adj->base_index);
3946
3947 vargs.quick_push (arg);
3948 }
3949 else if (adj->op != IPA_PARM_OP_REMOVE)
3950 {
3951 tree expr, base, off;
3952 location_t loc;
3953 unsigned int deref_align = 0;
3954 bool deref_base = false;
3955
3956 /* When we create a new parameter out of the value of the old one, we
3957 can do the following kinds of transformations:
3958
3959 - A scalar passed by reference is converted to a scalar passed by
3960 value. (adj->by_ref is false and the type of the original
3961 actual argument is a pointer to a scalar).
3962
3963 - A part of an aggregate is passed instead of the whole aggregate.
3964 The part can be passed either by value or by reference, this is
3965 determined by value of adj->by_ref. Moreover, the code below
3966 handles both situations when the original aggregate is passed by
3967 value (its type is not a pointer) and when it is passed by
3968 reference (it is a pointer to an aggregate).
3969
3970 When the new argument is passed by reference (adj->by_ref is true)
3971 it must be a part of an aggregate and therefore we form it by
3972 simply taking the address of a reference inside the original
3973 aggregate. */
3974
3975 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3976 base = gimple_call_arg (stmt, adj->base_index);
3977 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3978 : EXPR_LOCATION (base);
3979
3980 if (TREE_CODE (base) != ADDR_EXPR
3981 && POINTER_TYPE_P (TREE_TYPE (base)))
3982 off = build_int_cst (adj->alias_ptr_type,
3983 adj->offset / BITS_PER_UNIT);
3984 else
3985 {
3986 HOST_WIDE_INT base_offset;
3987 tree prev_base;
3988 bool addrof;
3989
3990 if (TREE_CODE (base) == ADDR_EXPR)
3991 {
3992 base = TREE_OPERAND (base, 0);
3993 addrof = true;
3994 }
3995 else
3996 addrof = false;
3997 prev_base = base;
3998 base = get_addr_base_and_unit_offset (base, &base_offset);
3999 /* Aggregate arguments can have non-invariant addresses. */
4000 if (!base)
4001 {
4002 base = build_fold_addr_expr (prev_base);
4003 off = build_int_cst (adj->alias_ptr_type,
4004 adj->offset / BITS_PER_UNIT);
4005 }
4006 else if (TREE_CODE (base) == MEM_REF)
4007 {
4008 if (!addrof)
4009 {
4010 deref_base = true;
4011 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4012 }
4013 off = build_int_cst (adj->alias_ptr_type,
4014 base_offset
4015 + adj->offset / BITS_PER_UNIT);
4016 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4017 off);
4018 base = TREE_OPERAND (base, 0);
4019 }
4020 else
4021 {
4022 off = build_int_cst (adj->alias_ptr_type,
4023 base_offset
4024 + adj->offset / BITS_PER_UNIT);
4025 base = build_fold_addr_expr (base);
4026 }
4027 }
4028
4029 if (!adj->by_ref)
4030 {
4031 tree type = adj->type;
4032 unsigned int align;
4033 unsigned HOST_WIDE_INT misalign;
4034
4035 if (deref_base)
4036 {
4037 align = deref_align;
4038 misalign = 0;
4039 }
4040 else
4041 {
4042 get_pointer_alignment_1 (base, &align, &misalign);
4043 if (TYPE_ALIGN (type) > align)
4044 align = TYPE_ALIGN (type);
4045 }
4046 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4047 * BITS_PER_UNIT);
4048 misalign = misalign & (align - 1);
4049 if (misalign != 0)
4050 align = (misalign & -misalign);
4051 if (align < TYPE_ALIGN (type))
4052 type = build_aligned_type (type, align);
4053 base = force_gimple_operand_gsi (&gsi, base,
4054 true, NULL, true, GSI_SAME_STMT);
4055 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4056 /* If expr is not a valid gimple call argument, emit
4057 a load into a temporary. */
4058 if (is_gimple_reg_type (TREE_TYPE (expr)))
4059 {
4060 gimple tem = gimple_build_assign (NULL_TREE, expr);
4061 if (gimple_in_ssa_p (cfun))
4062 {
4063 gimple_set_vuse (tem, gimple_vuse (stmt));
4064 expr = make_ssa_name (TREE_TYPE (expr), tem);
4065 }
4066 else
4067 expr = create_tmp_reg (TREE_TYPE (expr));
4068 gimple_assign_set_lhs (tem, expr);
4069 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4070 }
4071 }
4072 else
4073 {
4074 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4075 expr = build_fold_addr_expr (expr);
4076 expr = force_gimple_operand_gsi (&gsi, expr,
4077 true, NULL, true, GSI_SAME_STMT);
4078 }
4079 vargs.quick_push (expr);
4080 }
4081 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4082 {
4083 unsigned int ix;
4084 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4085 gimple def_temp;
4086
4087 arg = gimple_call_arg (stmt, adj->base_index);
4088 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4089 {
4090 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4091 continue;
4092 arg = fold_convert_loc (gimple_location (stmt),
4093 TREE_TYPE (origin), arg);
4094 }
4095 if (debug_args == NULL)
4096 debug_args = decl_debug_args_insert (callee_decl);
4097 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4098 if (ddecl == origin)
4099 {
4100 ddecl = (**debug_args)[ix + 1];
4101 break;
4102 }
4103 if (ddecl == NULL)
4104 {
4105 ddecl = make_node (DEBUG_EXPR_DECL);
4106 DECL_ARTIFICIAL (ddecl) = 1;
4107 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4108 DECL_MODE (ddecl) = DECL_MODE (origin);
4109
4110 vec_safe_push (*debug_args, origin);
4111 vec_safe_push (*debug_args, ddecl);
4112 }
4113 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4114 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4115 }
4116 }
4117
4118 if (dump_file && (dump_flags & TDF_DETAILS))
4119 {
4120 fprintf (dump_file, "replacing stmt:");
4121 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4122 }
4123
4124 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4125 vargs.release ();
4126 if (gimple_call_lhs (stmt))
4127 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4128
4129 gimple_set_block (new_stmt, gimple_block (stmt));
4130 if (gimple_has_location (stmt))
4131 gimple_set_location (new_stmt, gimple_location (stmt));
4132 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4133 gimple_call_copy_flags (new_stmt, stmt);
4134 if (gimple_in_ssa_p (cfun))
4135 {
4136 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4137 if (gimple_vdef (stmt))
4138 {
4139 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4140 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4141 }
4142 }
4143
4144 if (dump_file && (dump_flags & TDF_DETAILS))
4145 {
4146 fprintf (dump_file, "with stmt:");
4147 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4148 fprintf (dump_file, "\n");
4149 }
4150 gsi_replace (&gsi, new_stmt, true);
4151 if (cs)
4152 cs->set_call_stmt (new_stmt);
4153 do
4154 {
4155 current_node->record_stmt_references (gsi_stmt (gsi));
4156 gsi_prev (&gsi);
4157 }
4158 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4159 }
4160
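/* Illustrative sketch, not part of GCC proper: the effect of the rewriting
   above for the first kind of transformation listed in the comment inside
   ipa_modify_call_arguments -- a scalar passed by reference converted to a
   scalar passed by value.  With an adjustment { base_index = 0, offset = 0,
   by_ref = false, type = int }, a call

     foo (&i);

   is rewritten to

     tmp_1 = MEM[(int *) &i];
     foo.part (tmp_1);

   where tmp_1 is the temporary the code emits because a MEM_REF of register
   type is not a valid gimple call argument.  The callee name is
   hypothetical.  */
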
4161 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4162 so. ADJUSTMENTS is the vector of adjustments. CONVERT
4163 specifies whether the function should care about type incompatibility between
4164 the current and new expressions. If it is false, the function will leave
4165 incompatibility issues to the caller. Return true iff the expression
4166 was modified. */
4167
4168 bool
4169 ipa_modify_expr (tree *expr, bool convert,
4170 ipa_parm_adjustment_vec adjustments)
4171 {
4172 struct ipa_parm_adjustment *cand
4173 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4174 if (!cand)
4175 return false;
4176
4177 tree src;
4178 if (cand->by_ref)
4179 src = build_simple_mem_ref (cand->new_decl);
4180 else
4181 src = cand->new_decl;
4182
4183 if (dump_file && (dump_flags & TDF_DETAILS))
4184 {
4185 fprintf (dump_file, "About to replace expr ");
4186 print_generic_expr (dump_file, *expr, 0);
4187 fprintf (dump_file, " with ");
4188 print_generic_expr (dump_file, src, 0);
4189 fprintf (dump_file, "\n");
4190 }
4191
4192 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4193 {
4194 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4195 *expr = vce;
4196 }
4197 else
4198 *expr = src;
4199 return true;
4200 }
4201
4202 /* If T is an SSA_NAME, return NULL if it is not a default def or
4203 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4204 the base variable is always returned, regardless of whether it is a
4205 default def. Return T if it is not an SSA_NAME. */
4206
4207 static tree
4208 get_ssa_base_param (tree t, bool ignore_default_def)
4209 {
4210 if (TREE_CODE (t) == SSA_NAME)
4211 {
4212 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4213 return SSA_NAME_VAR (t);
4214 else
4215 return NULL_TREE;
4216 }
4217 return t;
4218 }
4219
4220 /* Given an expression, return an adjustment entry specifying the
4221 transformation to be done on EXPR. If no suitable adjustment entry
4222 is found, return NULL.
4223
4224 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4225 default def, otherwise bail on them.
4226
4227 If CONVERT is non-NULL, this function will set *CONVERT if the
4228 expression provided is a component reference. ADJUSTMENTS is the
4229 adjustments vector. */
4230
4231 ipa_parm_adjustment *
4232 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4233 ipa_parm_adjustment_vec adjustments,
4234 bool ignore_default_def)
4235 {
4236 if (TREE_CODE (**expr) == BIT_FIELD_REF
4237 || TREE_CODE (**expr) == IMAGPART_EXPR
4238 || TREE_CODE (**expr) == REALPART_EXPR)
4239 {
4240 *expr = &TREE_OPERAND (**expr, 0);
4241 if (convert)
4242 *convert = true;
4243 }
4244
4245 HOST_WIDE_INT offset, size, max_size;
4246 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4247 if (!base || size == -1 || max_size == -1)
4248 return NULL;
4249
4250 if (TREE_CODE (base) == MEM_REF)
4251 {
4252 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4253 base = TREE_OPERAND (base, 0);
4254 }
4255
4256 base = get_ssa_base_param (base, ignore_default_def);
4257 if (!base || TREE_CODE (base) != PARM_DECL)
4258 return NULL;
4259
4260 struct ipa_parm_adjustment *cand = NULL;
4261 unsigned int len = adjustments.length ();
4262 for (unsigned i = 0; i < len; i++)
4263 {
4264 struct ipa_parm_adjustment *adj = &adjustments[i];
4265
4266 if (adj->base == base
4267 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4268 {
4269 cand = adj;
4270 break;
4271 }
4272 }
4273
4274 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4275 return NULL;
4276 return cand;
4277 }
4278
4279 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4280
4281 static bool
4282 index_in_adjustments_multiple_times_p (int base_index,
4283 ipa_parm_adjustment_vec adjustments)
4284 {
4285 int i, len = adjustments.length ();
4286 bool one = false;
4287
4288 for (i = 0; i < len; i++)
4289 {
4290 struct ipa_parm_adjustment *adj;
4291 adj = &adjustments[i];
4292
4293 if (adj->base_index == base_index)
4294 {
4295 if (one)
4296 return true;
4297 else
4298 one = true;
4299 }
4300 }
4301 return false;
4302 }
4303
4304
4305 /* Return adjustments that should have the same effect on function parameters
4306 and call arguments as if they were first changed according to adjustments in
4307 INNER and then by adjustments in OUTER. */
4308
4309 ipa_parm_adjustment_vec
4310 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4311 ipa_parm_adjustment_vec outer)
4312 {
4313 int i, outlen = outer.length ();
4314 int inlen = inner.length ();
4315 int removals = 0;
4316 ipa_parm_adjustment_vec adjustments, tmp;
4317
4318 tmp.create (inlen);
4319 for (i = 0; i < inlen; i++)
4320 {
4321 struct ipa_parm_adjustment *n;
4322 n = &inner[i];
4323
4324 if (n->op == IPA_PARM_OP_REMOVE)
4325 removals++;
4326 else
4327 {
4328 /* FIXME: Handling of new arguments is not implemented yet. */
4329 gcc_assert (n->op != IPA_PARM_OP_NEW);
4330 tmp.quick_push (*n);
4331 }
4332 }
4333
4334 adjustments.create (outlen + removals);
4335 for (i = 0; i < outlen; i++)
4336 {
4337 struct ipa_parm_adjustment r;
4338 struct ipa_parm_adjustment *out = &outer[i];
4339 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4340
4341 memset (&r, 0, sizeof (r));
4342 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4343 if (out->op == IPA_PARM_OP_REMOVE)
4344 {
4345 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4346 {
4347 r.op = IPA_PARM_OP_REMOVE;
4348 adjustments.quick_push (r);
4349 }
4350 continue;
4351 }
4352 else
4353 {
4354 /* FIXME: Handling of new arguments is not implemented yet. */
4355 gcc_assert (out->op != IPA_PARM_OP_NEW);
4356 }
4357
4358 r.base_index = in->base_index;
4359 r.type = out->type;
4360
4361 /* FIXME: Create nonlocal value too. */
4362
4363 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4364 r.op = IPA_PARM_OP_COPY;
4365 else if (in->op == IPA_PARM_OP_COPY)
4366 r.offset = out->offset;
4367 else if (out->op == IPA_PARM_OP_COPY)
4368 r.offset = in->offset;
4369 else
4370 r.offset = in->offset + out->offset;
4371 adjustments.quick_push (r);
4372 }
4373
4374 for (i = 0; i < inlen; i++)
4375 {
4376 struct ipa_parm_adjustment *n = &inner[i];
4377
4378 if (n->op == IPA_PARM_OP_REMOVE)
4379 adjustments.quick_push (*n);
4380 }
4381
4382 tmp.release ();
4383 return adjustments;
4384 }
4385
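/* Illustrative sketch, not part of GCC proper: composing two adjustment
   vectors with ipa_combine_adjustments.  Suppose INNER turned f (a, b, c)
   into f' (a, c) by removing b, and OUTER then turns f' (a, c) into f'' (c)
   by removing the first parameter of f'.  OUTER's base_index values name
   f''s parameters, so its REMOVE entry names a.  The combined vector,
   expressed against the original f, is

     { REMOVE a, COPY c, REMOVE b }

   i.e. the removal induced by OUTER, the surviving copy with its base_index
   re-translated to the original parameter list, and INNER's own removals
   appended at the end.  */
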
4386 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4387 friendly way, assuming they are meant to be applied to FNDECL. */
4388
4389 void
4390 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4391 tree fndecl)
4392 {
4393 int i, len = adjustments.length ();
4394 bool first = true;
4395 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4396
4397 fprintf (file, "IPA param adjustments: ");
4398 for (i = 0; i < len; i++)
4399 {
4400 struct ipa_parm_adjustment *adj;
4401 adj = &adjustments[i];
4402
4403 if (!first)
4404 fprintf (file, " ");
4405 else
4406 first = false;
4407
4408 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4409 print_generic_expr (file, parms[adj->base_index], 0);
4410 if (adj->base)
4411 {
4412 fprintf (file, ", base: ");
4413 print_generic_expr (file, adj->base, 0);
4414 }
4415 if (adj->new_decl)
4416 {
4417 fprintf (file, ", new_decl: ");
4418 print_generic_expr (file, adj->new_decl, 0);
4419 }
4420 if (adj->new_ssa_base)
4421 {
4422 fprintf (file, ", new_ssa_base: ");
4423 print_generic_expr (file, adj->new_ssa_base, 0);
4424 }
4425
4426 if (adj->op == IPA_PARM_OP_COPY)
4427 fprintf (file, ", copy_param");
4428 else if (adj->op == IPA_PARM_OP_REMOVE)
4429 fprintf (file, ", remove_param");
4430 else
4431 fprintf (file, ", offset %li", (long) adj->offset);
4432 if (adj->by_ref)
4433 fprintf (file, ", by_ref");
4434 print_node_brief (file, ", type: ", adj->type, 0);
4435 fprintf (file, "\n");
4436 }
4437 parms.release ();
4438 }
4439
4440 /* Dump the linked list of aggregate replacement values AV to F. */
4441
4442 void
4443 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4444 {
4445 bool comma = false;
4446 fprintf (f, " Aggregate replacements:");
4447 for (; av; av = av->next)
4448 {
4449 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4450 av->index, av->offset);
4451 print_generic_expr (f, av->value, 0);
4452 comma = true;
4453 }
4454 fprintf (f, "\n");
4455 }
4456
4457 /* Stream out jump function JUMP_FUNC to OB. */
4458
4459 static void
4460 ipa_write_jump_function (struct output_block *ob,
4461 struct ipa_jump_func *jump_func)
4462 {
4463 struct ipa_agg_jf_item *item;
4464 struct bitpack_d bp;
4465 int i, count;
4466
4467 streamer_write_uhwi (ob, jump_func->type);
4468 switch (jump_func->type)
4469 {
4470 case IPA_JF_UNKNOWN:
4471 break;
4472 case IPA_JF_CONST:
4473 gcc_assert (
4474 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4475 stream_write_tree (ob, jump_func->value.constant.value, true);
4476 break;
4477 case IPA_JF_PASS_THROUGH:
4478 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4479 if (jump_func->value.pass_through.operation == NOP_EXPR)
4480 {
4481 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4482 bp = bitpack_create (ob->main_stream);
4483 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4484 streamer_write_bitpack (&bp);
4485 }
4486 else
4487 {
4488 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4489 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4490 }
4491 break;
4492 case IPA_JF_ANCESTOR:
4493 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4494 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4495 bp = bitpack_create (ob->main_stream);
4496 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4497 streamer_write_bitpack (&bp);
4498 break;
4499 }
4500
4501 count = vec_safe_length (jump_func->agg.items);
4502 streamer_write_uhwi (ob, count);
4503 if (count)
4504 {
4505 bp = bitpack_create (ob->main_stream);
4506 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4507 streamer_write_bitpack (&bp);
4508 }
4509
4510 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4511 {
4512 streamer_write_uhwi (ob, item->offset);
4513 stream_write_tree (ob, item->value, true);
4514 }
4515
4516 bp = bitpack_create (ob->main_stream);
4517 bp_pack_value (&bp, jump_func->alignment.known, 1);
4518 streamer_write_bitpack (&bp);
4519 if (jump_func->alignment.known)
4520 {
4521 streamer_write_uhwi (ob, jump_func->alignment.align);
4522 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4523 }
4524 }
4525
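/* Illustrative sketch, not part of GCC proper: the writer above and the
   reader below must stay byte-for-byte symmetric.  For a simple pass-through
   jump function with no aggregate entries the stream layout is

     uhwi     IPA_JF_PASS_THROUGH
     uhwi     NOP_EXPR
     uhwi     formal_id
     bitpack  { agg_preserved:1 }
     uhwi     0                      -- number of aggregate items
     bitpack  { alignment.known:1 }  -- plus align/misalign when set

   so any field added to ipa_write_jump_function needs a matching read at the
   same position in ipa_read_jump_function.  */
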
4526 /* Read in jump function JUMP_FUNC from IB. */
4527
4528 static void
4529 ipa_read_jump_function (struct lto_input_block *ib,
4530 struct ipa_jump_func *jump_func,
4531 struct cgraph_edge *cs,
4532 struct data_in *data_in)
4533 {
4534 enum jump_func_type jftype;
4535 enum tree_code operation;
4536 int i, count;
4537
4538 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4539 switch (jftype)
4540 {
4541 case IPA_JF_UNKNOWN:
4542 ipa_set_jf_unknown (jump_func);
4543 break;
4544 case IPA_JF_CONST:
4545 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4546 break;
4547 case IPA_JF_PASS_THROUGH:
4548 operation = (enum tree_code) streamer_read_uhwi (ib);
4549 if (operation == NOP_EXPR)
4550 {
4551 int formal_id = streamer_read_uhwi (ib);
4552 struct bitpack_d bp = streamer_read_bitpack (ib);
4553 bool agg_preserved = bp_unpack_value (&bp, 1);
4554 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4555 }
4556 else
4557 {
4558 tree operand = stream_read_tree (ib, data_in);
4559 int formal_id = streamer_read_uhwi (ib);
4560 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4561 operation);
4562 }
4563 break;
4564 case IPA_JF_ANCESTOR:
4565 {
4566 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4567 int formal_id = streamer_read_uhwi (ib);
4568 struct bitpack_d bp = streamer_read_bitpack (ib);
4569 bool agg_preserved = bp_unpack_value (&bp, 1);
4570 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4571 break;
4572 }
4573 }
4574
4575 count = streamer_read_uhwi (ib);
4576 vec_alloc (jump_func->agg.items, count);
4577 if (count)
4578 {
4579 struct bitpack_d bp = streamer_read_bitpack (ib);
4580 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4581 }
4582 for (i = 0; i < count; i++)
4583 {
4584 struct ipa_agg_jf_item item;
4585 item.offset = streamer_read_uhwi (ib);
4586 item.value = stream_read_tree (ib, data_in);
4587 jump_func->agg.items->quick_push (item);
4588 }
4589
4590 struct bitpack_d bp = streamer_read_bitpack (ib);
4591 bool alignment_known = bp_unpack_value (&bp, 1);
4592 if (alignment_known)
4593 {
4594 jump_func->alignment.known = true;
4595 jump_func->alignment.align = streamer_read_uhwi (ib);
4596 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4597 }
4598 else
4599 jump_func->alignment.known = false;
4600 }
4601
4602 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4603 relevant to indirect inlining to OB. */
4604
4605 static void
4606 ipa_write_indirect_edge_info (struct output_block *ob,
4607 struct cgraph_edge *cs)
4608 {
4609 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4610 struct bitpack_d bp;
4611
4612 streamer_write_hwi (ob, ii->param_index);
4613 bp = bitpack_create (ob->main_stream);
4614 bp_pack_value (&bp, ii->polymorphic, 1);
4615 bp_pack_value (&bp, ii->agg_contents, 1);
4616 bp_pack_value (&bp, ii->member_ptr, 1);
4617 bp_pack_value (&bp, ii->by_ref, 1);
4618 bp_pack_value (&bp, ii->vptr_changed, 1);
4619 streamer_write_bitpack (&bp);
4620 if (ii->agg_contents || ii->polymorphic)
4621 streamer_write_hwi (ob, ii->offset);
4622 else
4623 gcc_assert (ii->offset == 0);
4624
4625 if (ii->polymorphic)
4626 {
4627 streamer_write_hwi (ob, ii->otr_token);
4628 stream_write_tree (ob, ii->otr_type, true);
4629 ii->context.stream_out (ob);
4630 }
4631 }
4632
4633 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4634 relevant to indirect inlining from IB. */
4635
4636 static void
4637 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4638 struct data_in *data_in,
4639 struct cgraph_edge *cs)
4640 {
4641 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4642 struct bitpack_d bp;
4643
4644 ii->param_index = (int) streamer_read_hwi (ib);
4645 bp = streamer_read_bitpack (ib);
4646 ii->polymorphic = bp_unpack_value (&bp, 1);
4647 ii->agg_contents = bp_unpack_value (&bp, 1);
4648 ii->member_ptr = bp_unpack_value (&bp, 1);
4649 ii->by_ref = bp_unpack_value (&bp, 1);
4650 ii->vptr_changed = bp_unpack_value (&bp, 1);
4651 if (ii->agg_contents || ii->polymorphic)
4652 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4653 else
4654 ii->offset = 0;
4655 if (ii->polymorphic)
4656 {
4657 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4658 ii->otr_type = stream_read_tree (ib, data_in);
4659 ii->context.stream_in (ib, data_in);
4660 }
4661 }
4662
4663 /* Stream out NODE info to OB. */
4664
4665 static void
4666 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4667 {
4668 int node_ref;
4669 lto_symtab_encoder_t encoder;
4670 struct ipa_node_params *info = IPA_NODE_REF (node);
4671 int j;
4672 struct cgraph_edge *e;
4673 struct bitpack_d bp;
4674
4675 encoder = ob->decl_state->symtab_node_encoder;
4676 node_ref = lto_symtab_encoder_encode (encoder, node);
4677 streamer_write_uhwi (ob, node_ref);
4678
4679 streamer_write_uhwi (ob, ipa_get_param_count (info));
4680 for (j = 0; j < ipa_get_param_count (info); j++)
4681 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4682 bp = bitpack_create (ob->main_stream);
4683 gcc_assert (info->analysis_done
4684 || ipa_get_param_count (info) == 0);
4685 gcc_assert (!info->node_enqueued);
4686 gcc_assert (!info->ipcp_orig_node);
4687 for (j = 0; j < ipa_get_param_count (info); j++)
4688 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4689 streamer_write_bitpack (&bp);
4690 for (j = 0; j < ipa_get_param_count (info); j++)
4691 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4692 for (e = node->callees; e; e = e->next_callee)
4693 {
4694 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4695
4696 streamer_write_uhwi (ob,
4697 ipa_get_cs_argument_count (args) * 2
4698 + (args->polymorphic_call_contexts != NULL));
4699 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4700 {
4701 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4702 if (args->polymorphic_call_contexts != NULL)
4703 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4704 }
4705 }
4706 for (e = node->indirect_calls; e; e = e->next_callee)
4707 {
4708 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4709
4710 streamer_write_uhwi (ob,
4711 ipa_get_cs_argument_count (args) * 2
4712 + (args->polymorphic_call_contexts != NULL));
4713 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4714 {
4715 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4716 if (args->polymorphic_call_contexts != NULL)
4717 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4718 }
4719 ipa_write_indirect_edge_info (ob, e);
4720 }
4721 }
4722
4723 /* Stream in NODE info from IB. */
4724
4725 static void
4726 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4727 struct data_in *data_in)
4728 {
4729 struct ipa_node_params *info = IPA_NODE_REF (node);
4730 int k;
4731 struct cgraph_edge *e;
4732 struct bitpack_d bp;
4733
4734 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4735
4736 for (k = 0; k < ipa_get_param_count (info); k++)
4737 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4738
4739 bp = streamer_read_bitpack (ib);
4740 if (ipa_get_param_count (info) != 0)
4741 info->analysis_done = true;
4742 info->node_enqueued = false;
4743 for (k = 0; k < ipa_get_param_count (info); k++)
4744 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4745 for (k = 0; k < ipa_get_param_count (info); k++)
4746 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4747 for (e = node->callees; e; e = e->next_callee)
4748 {
4749 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4750 int count = streamer_read_uhwi (ib);
4751 bool contexts_computed = count & 1;
4752 count /= 2;
4753
4754 if (!count)
4755 continue;
4756 vec_safe_grow_cleared (args->jump_functions, count);
4757 if (contexts_computed)
4758 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4759
4760 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4761 {
4762 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4763 data_in);
4764 if (contexts_computed)
4765 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4766 }
4767 }
4768 for (e = node->indirect_calls; e; e = e->next_callee)
4769 {
4770 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4771 int count = streamer_read_uhwi (ib);
4772 bool contexts_computed = count & 1;
4773 count /= 2;
4774
4775 if (count)
4776 {
4777 vec_safe_grow_cleared (args->jump_functions, count);
4778 if (contexts_computed)
4779 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4780 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4781 {
4782 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4783 data_in);
4784 if (contexts_computed)
4785 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4786 }
4787 }
4788 ipa_read_indirect_edge_info (ib, data_in, e);
4789 }
4790 }
4791
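/* Illustrative sketch, not part of GCC proper: the writer and reader above
   multiplex the argument count and the presence of polymorphic call contexts
   into a single uhwi as count * 2 + has_contexts.  The helper below shows
   the decoding the reader performs inline; the function name is
   hypothetical.  */

static int ATTRIBUTE_UNUSED
ipa_example_decode_arg_count (int streamed, bool *contexts_computed)
{
  /* Bit 0 carries the flag, the remaining bits carry the count.  */
  *contexts_computed = (streamed & 1) != 0;
  return streamed / 2;
}
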
4792 /* Write jump functions for all functions in the current partition. */
4793
4794 void
4795 ipa_prop_write_jump_functions (void)
4796 {
4797 struct cgraph_node *node;
4798 struct output_block *ob;
4799 unsigned int count = 0;
4800 lto_symtab_encoder_iterator lsei;
4801 lto_symtab_encoder_t encoder;
4802
4804 if (!ipa_node_params_vector.exists ())
4805 return;
4806
4807 ob = create_output_block (LTO_section_jump_functions);
4808 encoder = ob->decl_state->symtab_node_encoder;
4809 ob->symbol = NULL;
4810 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4811 lsei_next_function_in_partition (&lsei))
4812 {
4813 node = lsei_cgraph_node (lsei);
4814 if (node->has_gimple_body_p ()
4815 && IPA_NODE_REF (node) != NULL)
4816 count++;
4817 }
4818
4819 streamer_write_uhwi (ob, count);
4820
4821 /* Process all of the functions. */
4822 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4823 lsei_next_function_in_partition (&lsei))
4824 {
4825 node = lsei_cgraph_node (lsei);
4826 if (node->has_gimple_body_p ()
4827 && IPA_NODE_REF (node) != NULL)
4828 ipa_write_node_info (ob, node);
4829 }
4830 streamer_write_char_stream (ob->main_stream, 0);
4831 produce_asm (ob, NULL);
4832 destroy_output_block (ob);
4833 }
4834
4835 /* Read section in file FILE_DATA of length LEN with data DATA. */
4836
4837 static void
4838 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4839 size_t len)
4840 {
4841 const struct lto_function_header *header =
4842 (const struct lto_function_header *) data;
4843 const int cfg_offset = sizeof (struct lto_function_header);
4844 const int main_offset = cfg_offset + header->cfg_size;
4845 const int string_offset = main_offset + header->main_size;
4846 struct data_in *data_in;
4847 unsigned int i;
4848 unsigned int count;
4849
4850 lto_input_block ib_main ((const char *) data + main_offset,
4851 header->main_size);
4852
4853 data_in =
4854 lto_data_in_create (file_data, (const char *) data + string_offset,
4855 header->string_size, vNULL);
4856 count = streamer_read_uhwi (&ib_main);
4857
4858 for (i = 0; i < count; i++)
4859 {
4860 unsigned int index;
4861 struct cgraph_node *node;
4862 lto_symtab_encoder_t encoder;
4863
4864 index = streamer_read_uhwi (&ib_main);
4865 encoder = file_data->symtab_node_encoder;
4866 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4867 index));
4868 gcc_assert (node->definition);
4869 ipa_read_node_info (&ib_main, node, data_in);
4870 }
4871 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4872 len);
4873 lto_data_in_delete (data_in);
4874 }
4875
4876 /* Read ipcp jump functions. */
4877
4878 void
4879 ipa_prop_read_jump_functions (void)
4880 {
4881 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4882 struct lto_file_decl_data *file_data;
4883 unsigned int j = 0;
4884
4885 ipa_check_create_node_params ();
4886 ipa_check_create_edge_args ();
4887 ipa_register_cgraph_hooks ();
4888
4889 while ((file_data = file_data_vec[j++]))
4890 {
4891 size_t len;
4892 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4893
4894 if (data)
4895 ipa_prop_read_section (file_data, data, len);
4896 }
4897 }
4898
4899 /* After merging units, we can get a mismatch in argument counts.
4900 Decl merging might also have rendered parameter lists obsolete.
4901 Also compute called_with_variable_arg info. */
4902
4903 void
4904 ipa_update_after_lto_read (void)
4905 {
4906 ipa_check_create_node_params ();
4907 ipa_check_create_edge_args ();
4908 }
4909
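/* Stream out the aggregate value replacement chain and the parameter
   alignments recorded for NODE to OB. */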
4910 void
4911 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4912 {
4913 int node_ref;
4914 unsigned int count = 0;
4915 lto_symtab_encoder_t encoder;
4916 struct ipa_agg_replacement_value *aggvals, *av;
4917
4918 aggvals = ipa_get_agg_replacements_for_node (node);
4919 encoder = ob->decl_state->symtab_node_encoder;
4920 node_ref = lto_symtab_encoder_encode (encoder, node);
4921 streamer_write_uhwi (ob, node_ref);
4922
4923 for (av = aggvals; av; av = av->next)
4924 count++;
4925 streamer_write_uhwi (ob, count);
4926
4927 for (av = aggvals; av; av = av->next)
4928 {
4929 struct bitpack_d bp;
4930
4931 streamer_write_uhwi (ob, av->offset);
4932 streamer_write_uhwi (ob, av->index);
4933 stream_write_tree (ob, av->value, true);
4934
4935 bp = bitpack_create (ob->main_stream);
4936 bp_pack_value (&bp, av->by_ref, 1);
4937 streamer_write_bitpack (&bp);
4938 }
4939
4940 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4941 if (ts && vec_safe_length (ts->alignments) > 0)
4942 {
4943 count = ts->alignments->length ();
4944
4945 streamer_write_uhwi (ob, count);
4946 for (unsigned i = 0; i < count; ++i)
4947 {
4948 ipa_alignment *parm_al = &(*ts->alignments)[i];
4949
4950 struct bitpack_d bp;
4951 bp = bitpack_create (ob->main_stream);
4952 bp_pack_value (&bp, parm_al->known, 1);
4953 streamer_write_bitpack (&bp);
4954 if (parm_al->known)
4955 {
4956 streamer_write_uhwi (ob, parm_al->align);
4957 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4958 parm_al->misalign);
4959 }
4960 }
4961 }
4962 else
4963 streamer_write_uhwi (ob, 0);
4964 }
4965
4966 /* Stream in the aggregate value replacement chain and parameter alignments for NODE from IB. */
4967
4968 static void
4969 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4970 data_in *data_in)
4971 {
4972 struct ipa_agg_replacement_value *aggvals = NULL;
4973 unsigned int count, i;
4974
4975 count = streamer_read_uhwi (ib);
4976 for (i = 0; i < count; i++)
4977 {
4978 struct ipa_agg_replacement_value *av;
4979 struct bitpack_d bp;
4980
4981 av = ggc_alloc<ipa_agg_replacement_value> ();
4982 av->offset = streamer_read_uhwi (ib);
4983 av->index = streamer_read_uhwi (ib);
4984 av->value = stream_read_tree (ib, data_in);
4985 bp = streamer_read_bitpack (ib);
4986 av->by_ref = bp_unpack_value (&bp, 1);
4987 av->next = aggvals;
4988 aggvals = av;
4989 }
4990 ipa_set_node_agg_value_chain (node, aggvals);
4991
4992 count = streamer_read_uhwi (ib);
4993 if (count > 0)
4994 {
4995 ipcp_grow_transformations_if_necessary ();
4996
4997 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4998 vec_safe_grow_cleared (ts->alignments, count);
4999
5000 for (i = 0; i < count; i++)
5001 {
5002 ipa_alignment *parm_al;
5003 parm_al = &(*ts->alignments)[i];
5004 struct bitpack_d bp;
5005 bp = streamer_read_bitpack (ib);
5006 parm_al->known = bp_unpack_value (&bp, 1);
5007 if (parm_al->known)
5008 {
5009 parm_al->align = streamer_read_uhwi (ib);
5010 parm_al->misalign
5011 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5012 0, parm_al->align);
5013 }
5014 }
5015 }
5016 }
5017
5018 /* Write transformation summaries of all functions in the current partition. */
5019
5020 void
5021 ipcp_write_transformation_summaries (void)
5022 {
5023 struct cgraph_node *node;
5024 struct output_block *ob;
5025 unsigned int count = 0;
5026 lto_symtab_encoder_iterator lsei;
5027 lto_symtab_encoder_t encoder;
5028
5029 ob = create_output_block (LTO_section_ipcp_transform);
5030 encoder = ob->decl_state->symtab_node_encoder;
5031 ob->symbol = NULL;
5032 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5033 lsei_next_function_in_partition (&lsei))
5034 {
5035 node = lsei_cgraph_node (lsei);
5036 if (node->has_gimple_body_p ())
5037 count++;
5038 }
5039
5040 streamer_write_uhwi (ob, count);
5041
5042 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5043 lsei_next_function_in_partition (&lsei))
5044 {
5045 node = lsei_cgraph_node (lsei);
5046 if (node->has_gimple_body_p ())
5047 write_ipcp_transformation_info (ob, node);
5048 }
5049 streamer_write_char_stream (ob->main_stream, 0);
5050 produce_asm (ob, NULL);
5051 destroy_output_block (ob);
5052 }
5053
5054 /* Read replacements section in file FILE_DATA of length LEN with data
5055 DATA. */
5056
5057 static void
5058 read_replacements_section (struct lto_file_decl_data *file_data,
5059 const char *data,
5060 size_t len)
5061 {
5062 const struct lto_function_header *header =
5063 (const struct lto_function_header *) data;
5064 const int cfg_offset = sizeof (struct lto_function_header);
5065 const int main_offset = cfg_offset + header->cfg_size;
5066 const int string_offset = main_offset + header->main_size;
5067 struct data_in *data_in;
5068 unsigned int i;
5069 unsigned int count;
5070
5071 lto_input_block ib_main ((const char *) data + main_offset,
5072 header->main_size);
5073
5074 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5075 header->string_size, vNULL);
5076 count = streamer_read_uhwi (&ib_main);
5077
5078 for (i = 0; i < count; i++)
5079 {
5080 unsigned int index;
5081 struct cgraph_node *node;
5082 lto_symtab_encoder_t encoder;
5083
5084 index = streamer_read_uhwi (&ib_main);
5085 encoder = file_data->symtab_node_encoder;
5086 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5087 index));
5088 gcc_assert (node->definition);
5089 read_ipcp_transformation_info (&ib_main, node, data_in);
5090 }
5091 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5092 len);
5093 lto_data_in_delete (data_in);
5094 }
5095
5096 /* Read IPA-CP aggregate replacements. */
5097
5098 void
5099 ipcp_read_transformation_summaries (void)
5100 {
5101 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5102 struct lto_file_decl_data *file_data;
5103 unsigned int j = 0;
5104
5105 while ((file_data = file_data_vec[j++]))
5106 {
5107 size_t len;
5108 const char *data = lto_get_section_data (file_data,
5109 LTO_section_ipcp_transform,
5110 NULL, &len);
5111 if (data)
5112 read_replacements_section (file_data, data, len);
5113 }
5114 }
5115
5116 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5117 NODE. */
5118
5119 static void
5120 adjust_agg_replacement_values (struct cgraph_node *node,
5121 struct ipa_agg_replacement_value *aggval)
5122 {
5123 struct ipa_agg_replacement_value *v;
5124 int i, c = 0, d = 0, *adj;
5125
5126 if (!node->clone.combined_args_to_skip)
5127 return;
5128
5129 for (v = aggval; v; v = v->next)
5130 {
5131 gcc_assert (v->index >= 0);
5132 if (c < v->index)
5133 c = v->index;
5134 }
5135 c++;
5136
5137 adj = XALLOCAVEC (int, c);
5138 for (i = 0; i < c; i++)
5139 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5140 {
5141 adj[i] = -1;
5142 d++;
5143 }
5144 else
5145 adj[i] = i - d;
5146
5147 for (v = aggval; v; v = v->next)
5148 v->index = adj[v->index];
5149 }
5150
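/* Illustrative sketch, not part of GCC proper: the remapping above in a
   concrete case.  For a clone of a four-parameter function whose
   combined_args_to_skip is { 1 }, the scratch array becomes

     adj[] = { 0, -1, 1, 2 }

   so a replacement recorded against original index 2 is renumbered to
   index 1, matching the clone's shrunken parameter list; an entry whose own
   parameter was skipped would end up with index -1.  */
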
5151 /* Dominator walker driving the ipcp modification phase. */
5152
5153 class ipcp_modif_dom_walker : public dom_walker
5154 {
5155 public:
5156 ipcp_modif_dom_walker (struct func_body_info *fbi,
5157 vec<ipa_param_descriptor> descs,
5158 struct ipa_agg_replacement_value *av,
5159 bool *sc, bool *cc)
5160 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5161 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5162
5163 virtual void before_dom_children (basic_block);
5164
5165 private:
5166 struct func_body_info *m_fbi;
5167 vec<ipa_param_descriptor> m_descriptors;
5168 struct ipa_agg_replacement_value *m_aggval;
5169 bool *m_something_changed, *m_cfg_changed;
5170 };
5171
5172 void
5173 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5174 {
5175 gimple_stmt_iterator gsi;
5176 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5177 {
5178 struct ipa_agg_replacement_value *v;
5179 gimple stmt = gsi_stmt (gsi);
5180 tree rhs, val, t;
5181 HOST_WIDE_INT offset, size;
5182 int index;
5183 bool by_ref, vce;
5184
5185 if (!gimple_assign_load_p (stmt))
5186 continue;
5187 rhs = gimple_assign_rhs1 (stmt);
5188 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5189 continue;
5190
5191 vce = false;
5192 t = rhs;
5193 while (handled_component_p (t))
5194 {
5195 /* V_C_E can do things like convert an array of integers to one
5196 bigger integer and similar things we do not handle below. */
5197 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5198 {
5199 vce = true;
5200 break;
5201 }
5202 t = TREE_OPERAND (t, 0);
5203 }
5204 if (vce)
5205 continue;
5206
5207 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5208 &offset, &size, &by_ref))
5209 continue;
5210 for (v = m_aggval; v; v = v->next)
5211 if (v->index == index
5212 && v->offset == offset)
5213 break;
5214 if (!v
5215 || v->by_ref != by_ref
5216 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5217 continue;
5218
5219 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5220 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5221 {
5222 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5223 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5224 else if (TYPE_SIZE (TREE_TYPE (rhs))
5225 == TYPE_SIZE (TREE_TYPE (v->value)))
5226 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5227 else
5228 {
5229 if (dump_file)
5230 {
5231 fprintf (dump_file, " const ");
5232 print_generic_expr (dump_file, v->value, 0);
5233 fprintf (dump_file, " can't be converted to type of ");
5234 print_generic_expr (dump_file, rhs, 0);
5235 fprintf (dump_file, "\n");
5236 }
5237 continue;
5238 }
5239 }
5240 else
5241 val = v->value;
5242
5243 if (dump_file && (dump_flags & TDF_DETAILS))
5244 {
5245 fprintf (dump_file, "Modifying stmt:\n ");
5246 print_gimple_stmt (dump_file, stmt, 0, 0);
5247 }
5248 gimple_assign_set_rhs_from_tree (&gsi, val);
5249 update_stmt (stmt);
5250
5251 if (dump_file && (dump_flags & TDF_DETAILS))
5252 {
5253 fprintf (dump_file, "into:\n ");
5254 print_gimple_stmt (dump_file, stmt, 0, 0);
5255 fprintf (dump_file, "\n");
5256 }
5257
5258 *m_something_changed = true;
5259 if (maybe_clean_eh_stmt (stmt)
5260 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5261 *m_cfg_changed = true;
5262 }
5264 }
5265
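/* Illustrative sketch, not part of GCC proper: the rewrite performed by the
   walker above.  Given an aggregate replacement 0[32]=7 (parameter 0, bit
   offset 32) for a by-value aggregate parameter s, a load

     x_1 = s.f;   -- f lives at bit offset 32 and has matching size

   is turned into

     x_1 = 7;

   by gimple_assign_set_rhs_from_tree followed by update_stmt.  */
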
5266 /* Update alignment of formal parameters as described in
5267 ipcp_transformation_summary. */
5268
5269 static void
5270 ipcp_update_alignments (struct cgraph_node *node)
5271 {
5272 tree fndecl = node->decl;
5273 tree parm = DECL_ARGUMENTS (fndecl);
5274 tree next_parm = parm;
5275 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5276 if (!ts || vec_safe_length (ts->alignments) == 0)
5277 return;
5278 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5279 unsigned count = alignments.length ();
5280
5281 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5282 {
5283 if (node->clone.combined_args_to_skip
5284 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5285 continue;
5286 gcc_checking_assert (parm);
5287 next_parm = DECL_CHAIN (parm);
5288
5289 if (!alignments[i].known || !is_gimple_reg (parm))
5290 continue;
5291 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5292 if (!ddef)
5293 continue;
5294
5295 if (dump_file)
5296 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5297 "misalignment to %u\n", i, alignments[i].align,
5298 alignments[i].misalign);
5299
5300 struct ptr_info_def *pi = get_ptr_info (ddef);
5301 gcc_checking_assert (pi);
5302 unsigned old_align;
5303 unsigned old_misalign;
5304 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5305
5306 if (old_known
5307 && old_align >= alignments[i].align)
5308 {
5309 if (dump_file)
5310 fprintf (dump_file, " But the alignment was already %u.\n",
5311 old_align);
5312 continue;
5313 }
5314 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5315 }
5316 }
5317
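/* Illustrative sketch, not part of GCC proper: the meaning of the pair
   stored above.  align = 16 with misalign = 8 records that the pointer
   parameter is congruent to 8 modulo 16, i.e. its value has the form
   16 * k + 8; consumers such as the vectorizer retrieve this through
   get_ptr_info_alignment.  The update is skipped whenever points-to
   information already proves an equal or greater alignment.  */
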
5318 /* IPCP transformation phase doing propagation of aggregate values. */
5319
5320 unsigned int
5321 ipcp_transform_function (struct cgraph_node *node)
5322 {
5323 vec<ipa_param_descriptor> descriptors = vNULL;
5324 struct func_body_info fbi;
5325 struct ipa_agg_replacement_value *aggval;
5326 int param_count;
5327 bool cfg_changed = false, something_changed = false;
5328
5329 gcc_checking_assert (cfun);
5330 gcc_checking_assert (current_function_decl);
5331
5332 if (dump_file)
5333 fprintf (dump_file, "Modification phase of node %s/%i\n",
5334 node->name (), node->order);
5335
5336 ipcp_update_alignments (node);
5337 aggval = ipa_get_agg_replacements_for_node (node);
5338 if (!aggval)
5339 return 0;
5340 param_count = count_formal_params (node->decl);
5341 if (param_count == 0)
5342 return 0;
5343 adjust_agg_replacement_values (node, aggval);
5344 if (dump_file)
5345 ipa_dump_agg_replacement_values (dump_file, aggval);
5346
5347 fbi.node = node;
5348 fbi.info = NULL;
5349 fbi.bb_infos = vNULL;
5350 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5351 fbi.param_count = param_count;
5352 fbi.aa_walked = 0;
5353
5354 descriptors.safe_grow_cleared (param_count);
5355 ipa_populate_param_decls (node, descriptors);
5356 calculate_dominance_info (CDI_DOMINATORS);
5357 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5358 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5359
5360 int i;
5361 struct ipa_bb_info *bi;
5362 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5363 free_ipa_bb_info (bi);
5364 fbi.bb_infos.release ();
5365 free_dominance_info (CDI_DOMINATORS);
5366 (*ipcp_transformations)[node->uid].agg_values = NULL;
5367 (*ipcp_transformations)[node->uid].alignments = NULL;
5368 descriptors.release ();
5369
5370 if (!something_changed)
5371 return 0;
5372 else if (cfg_changed)
5373 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5374 else
5375 return TODO_update_ssa_only_virtuals;
5376 }