1 /* Interprocedural analyses.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hash-set.h"
24 #include "machmode.h"
25 #include "vec.h"
26 #include "double-int.h"
27 #include "input.h"
28 #include "alias.h"
29 #include "symtab.h"
30 #include "options.h"
31 #include "wide-int.h"
32 #include "inchash.h"
33 #include "tree.h"
34 #include "fold-const.h"
35 #include "predict.h"
36 #include "tm.h"
37 #include "hard-reg-set.h"
38 #include "function.h"
39 #include "dominance.h"
40 #include "cfg.h"
41 #include "basic-block.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "hashtab.h"
50 #include "rtl.h"
51 #include "flags.h"
52 #include "statistics.h"
53 #include "real.h"
54 #include "fixed-value.h"
55 #include "insn-config.h"
56 #include "expmed.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "calls.h"
60 #include "emit-rtl.h"
61 #include "varasm.h"
62 #include "stmt.h"
63 #include "expr.h"
64 #include "stor-layout.h"
65 #include "print-tree.h"
66 #include "gimplify.h"
67 #include "gimple-iterator.h"
68 #include "gimplify-me.h"
69 #include "gimple-walk.h"
70 #include "langhooks.h"
71 #include "target.h"
72 #include "hash-map.h"
73 #include "plugin-api.h"
74 #include "ipa-ref.h"
75 #include "cgraph.h"
76 #include "alloc-pool.h"
77 #include "symbol-summary.h"
78 #include "ipa-prop.h"
79 #include "bitmap.h"
80 #include "gimple-ssa.h"
81 #include "tree-cfg.h"
82 #include "tree-phinodes.h"
83 #include "ssa-iterators.h"
84 #include "tree-into-ssa.h"
85 #include "tree-dfa.h"
86 #include "tree-pass.h"
87 #include "tree-inline.h"
88 #include "ipa-inline.h"
89 #include "diagnostic.h"
90 #include "gimple-pretty-print.h"
91 #include "lto-streamer.h"
92 #include "data-streamer.h"
93 #include "tree-streamer.h"
94 #include "params.h"
95 #include "ipa-utils.h"
96 #include "stringpool.h"
97 #include "tree-ssanames.h"
98 #include "dbgcnt.h"
99 #include "domwalk.h"
100 #include "builtins.h"
101
102 /* Intermediate information that we get from alias analysis about a particular
103 parameter in a particular basic_block. When a parameter or the memory it
 103    references is marked modified, we use that information in all dominated
 104    blocks without consulting the alias analysis oracle.  */
106
107 struct param_aa_status
108 {
109 /* Set when this structure contains meaningful information. If not, the
110 structure describing a dominating BB should be used instead. */
111 bool valid;
112
113 /* Whether we have seen something which might have modified the data in
114 question. PARM is for the parameter itself, REF is for data it points to
115 but using the alias type of individual accesses and PT is the same thing
116 but for computing aggregate pass-through functions using a very inclusive
117 ao_ref. */
118 bool parm_modified, ref_modified, pt_modified;
119 };
120
 121 /* Information related to a given BB that is used only when looking at function
122 body. */
123
124 struct ipa_bb_info
125 {
126 /* Call graph edges going out of this BB. */
127 vec<cgraph_edge *> cg_edges;
128 /* Alias analysis statuses of each formal parameter at this bb. */
129 vec<param_aa_status> param_aa_statuses;
130 };
131
132 /* Structure with global information that is only used when looking at function
133 body. */
134
135 struct func_body_info
136 {
137 /* The node that is being analyzed. */
138 cgraph_node *node;
139
140 /* Its info. */
141 struct ipa_node_params *info;
142
143 /* Information about individual BBs. */
144 vec<ipa_bb_info> bb_infos;
145
146 /* Number of parameters. */
147 int param_count;
148
 149   /* Number of statements we have already walked when analyzing this function.  */
150 unsigned int aa_walked;
151 };
152
153 /* Function summary where the parameter infos are actually stored. */
154 ipa_node_params_t *ipa_node_params_sum = NULL;
155 /* Vector of IPA-CP transformation data for each clone. */
156 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
 157 /* Vector where the per-edge argument infos (jump functions) are actually stored.  */
158 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
159
160 /* Holders of ipa cgraph hooks: */
161 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
162 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
163 static struct cgraph_node_hook_list *function_insertion_hook_holder;
164
165 /* Description of a reference to an IPA constant. */
166 struct ipa_cst_ref_desc
167 {
168 /* Edge that corresponds to the statement which took the reference. */
169 struct cgraph_edge *cs;
170 /* Linked list of duplicates created when call graph edges are cloned. */
171 struct ipa_cst_ref_desc *next_duplicate;
172 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
 173      is out of control.  */
174 int refcount;
175 };
176
177 /* Allocation pool for reference descriptions. */
178
179 static alloc_pool ipa_refdesc_pool;
180
181 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
182 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
183
184 static bool
185 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
186 {
187 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
188
189 if (!fs_opts)
190 return false;
191 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
192 }
193
 194 /* Return the index of the formal parameter whose tree is PTREE in the vector
 195    of parameter DESCRIPTORS, or -1 if it is not found.  */
196
197 static int
198 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
199 {
200 int i, count;
201
202 count = descriptors.length ();
203 for (i = 0; i < count; i++)
204 if (descriptors[i].decl == ptree)
205 return i;
206
207 return -1;
208 }
209
 210 /* Return the index of the formal parameter whose tree is PTREE in the function
 211    which corresponds to INFO.  */
212
213 int
214 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
215 {
216 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
217 }
218
 219 /* Populate the decl fields in the parameter DESCRIPTORS that correspond to
 220    NODE.  */
221
222 static void
223 ipa_populate_param_decls (struct cgraph_node *node,
224 vec<ipa_param_descriptor> &descriptors)
225 {
226 tree fndecl;
227 tree fnargs;
228 tree parm;
229 int param_num;
230
231 fndecl = node->decl;
232 gcc_assert (gimple_has_body_p (fndecl));
233 fnargs = DECL_ARGUMENTS (fndecl);
234 param_num = 0;
235 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
236 {
237 descriptors[param_num].decl = parm;
238 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
239 true);
240 param_num++;
241 }
242 }
243
244 /* Return how many formal parameters FNDECL has. */
245
246 int
247 count_formal_params (tree fndecl)
248 {
249 tree parm;
250 int count = 0;
251 gcc_assert (gimple_has_body_p (fndecl));
252
253 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
254 count++;
255
256 return count;
257 }
258
 259 /* Dump information about the Ith formal parameter of the function
 260    described by INFO to FILE: its index and, when the descriptor
 261    records one, its declaration.  */
262
263 void
264 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
265 {
266 fprintf (file, "param #%i", i);
267 if (info->descriptors[i].decl)
268 {
269 fprintf (file, " ");
270 print_generic_expr (file, info->descriptors[i].decl, 0);
271 }
272 }
273
274 /* Initialize the ipa_node_params structure associated with NODE
275 to hold PARAM_COUNT parameters. */
276
277 void
278 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
279 {
280 struct ipa_node_params *info = IPA_NODE_REF (node);
281
282 if (!info->descriptors.exists () && param_count)
283 info->descriptors.safe_grow_cleared (param_count);
284 }
285
286 /* Initialize the ipa_node_params structure associated with NODE by counting
287 the function parameters, creating the descriptors and populating their
288 param_decls. */
289
290 void
291 ipa_initialize_node_params (struct cgraph_node *node)
292 {
293 struct ipa_node_params *info = IPA_NODE_REF (node);
294
295 if (!info->descriptors.exists ())
296 {
297 ipa_alloc_node_params (node, count_formal_params (node->decl));
298 ipa_populate_param_decls (node, info->descriptors);
299 }
300 }
301
302 /* Print the jump functions associated with call graph edge CS to file F. */
303
304 static void
305 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
306 {
307 int i, count;
308
309 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
310 for (i = 0; i < count; i++)
311 {
312 struct ipa_jump_func *jump_func;
313 enum jump_func_type type;
314
315 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
316 type = jump_func->type;
317
318 fprintf (f, " param %d: ", i);
319 if (type == IPA_JF_UNKNOWN)
320 fprintf (f, "UNKNOWN\n");
321 else if (type == IPA_JF_CONST)
322 {
323 tree val = jump_func->value.constant.value;
324 fprintf (f, "CONST: ");
325 print_generic_expr (f, val, 0);
326 if (TREE_CODE (val) == ADDR_EXPR
327 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
328 {
329 fprintf (f, " -> ");
330 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
331 0);
332 }
333 fprintf (f, "\n");
334 }
335 else if (type == IPA_JF_PASS_THROUGH)
336 {
337 fprintf (f, "PASS THROUGH: ");
338 fprintf (f, "%d, op %s",
339 jump_func->value.pass_through.formal_id,
340 get_tree_code_name(jump_func->value.pass_through.operation));
341 if (jump_func->value.pass_through.operation != NOP_EXPR)
342 {
343 fprintf (f, " ");
344 print_generic_expr (f,
345 jump_func->value.pass_through.operand, 0);
346 }
347 if (jump_func->value.pass_through.agg_preserved)
348 fprintf (f, ", agg_preserved");
349 fprintf (f, "\n");
350 }
351 else if (type == IPA_JF_ANCESTOR)
352 {
353 fprintf (f, "ANCESTOR: ");
 354 	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
355 jump_func->value.ancestor.formal_id,
356 jump_func->value.ancestor.offset);
357 if (jump_func->value.ancestor.agg_preserved)
358 fprintf (f, ", agg_preserved");
359 fprintf (f, "\n");
360 }
361
362 if (jump_func->agg.items)
363 {
364 struct ipa_agg_jf_item *item;
365 int j;
366
367 fprintf (f, " Aggregate passed by %s:\n",
368 jump_func->agg.by_ref ? "reference" : "value");
369 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
370 {
371 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
372 item->offset);
373 if (TYPE_P (item->value))
374 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
375 tree_to_uhwi (TYPE_SIZE (item->value)));
376 else
377 {
378 fprintf (f, "cst: ");
379 print_generic_expr (f, item->value, 0);
380 }
381 fprintf (f, "\n");
382 }
383 }
384
385 struct ipa_polymorphic_call_context *ctx
386 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
387 if (ctx && !ctx->useless_p ())
388 {
389 fprintf (f, " Context: ");
390 ctx->dump (dump_file);
391 }
392
393 if (jump_func->alignment.known)
394 {
395 fprintf (f, " Alignment: %u, misalignment: %u\n",
396 jump_func->alignment.align,
397 jump_func->alignment.misalign);
398 }
399 else
400 fprintf (f, " Unknown alignment\n");
401 }
402 }
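/* For illustration only (not output captured from a real run), the dump
   produced by the function above looks roughly like:

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 1: CONST: 4
       param 2: UNKNOWN

   optionally followed by aggregate, polymorphic context and alignment
   lines for each parameter.  */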
403
404
405 /* Print the jump functions of all arguments on all call graph edges going from
406 NODE to file F. */
407
408 void
409 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
410 {
411 struct cgraph_edge *cs;
412
413 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
414 node->order);
415 for (cs = node->callees; cs; cs = cs->next_callee)
416 {
417 if (!ipa_edge_args_info_available_for_edge_p (cs))
418 continue;
419
420 fprintf (f, " callsite %s/%i -> %s/%i : \n",
421 xstrdup_for_dump (node->name ()), node->order,
422 xstrdup_for_dump (cs->callee->name ()),
423 cs->callee->order);
424 ipa_print_node_jump_functions_for_edge (f, cs);
425 }
426
427 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
428 {
429 struct cgraph_indirect_call_info *ii;
430 if (!ipa_edge_args_info_available_for_edge_p (cs))
431 continue;
432
433 ii = cs->indirect_info;
434 if (ii->agg_contents)
435 fprintf (f, " indirect %s callsite, calling param %i, "
436 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
437 ii->member_ptr ? "member ptr" : "aggregate",
438 ii->param_index, ii->offset,
439 ii->by_ref ? "by reference" : "by_value");
440 else
441 fprintf (f, " indirect %s callsite, calling param %i, "
442 "offset " HOST_WIDE_INT_PRINT_DEC,
443 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
444 ii->offset);
445
446 if (cs->call_stmt)
447 {
448 fprintf (f, ", for stmt ");
449 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
450 }
451 else
452 fprintf (f, "\n");
453 if (ii->polymorphic)
454 ii->context.dump (f);
455 ipa_print_node_jump_functions_for_edge (f, cs);
456 }
457 }
458
459 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
460
461 void
462 ipa_print_all_jump_functions (FILE *f)
463 {
464 struct cgraph_node *node;
465
466 fprintf (f, "\nJump functions:\n");
467 FOR_EACH_FUNCTION (node)
468 {
469 ipa_print_node_jump_functions (f, node);
470 }
471 }
472
 473 /* Set JFUNC to be an unknown jump function, i.e. one carrying no information.  */
474
475 static void
476 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
477 {
478 jfunc->type = IPA_JF_UNKNOWN;
479 jfunc->alignment.known = false;
480 }
481
 482 /* Set DST to be a copy of the jump function SRC (to be used by jump function
483 combination code). The two functions will share their rdesc. */
484
485 static void
486 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
487 struct ipa_jump_func *src)
488
489 {
490 gcc_checking_assert (src->type == IPA_JF_CONST);
491 dst->type = IPA_JF_CONST;
492 dst->value.constant = src->value.constant;
493 }
494
 495 /* Set JFUNC to be a constant jump function.  */
496
497 static void
498 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
499 struct cgraph_edge *cs)
500 {
501 constant = unshare_expr (constant);
502 if (constant && EXPR_P (constant))
503 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
504 jfunc->type = IPA_JF_CONST;
505 jfunc->value.constant.value = unshare_expr_without_location (constant);
506
507 if (TREE_CODE (constant) == ADDR_EXPR
508 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
509 {
510 struct ipa_cst_ref_desc *rdesc;
511 if (!ipa_refdesc_pool)
512 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
513 sizeof (struct ipa_cst_ref_desc), 32);
514
515 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
516 rdesc->cs = cs;
517 rdesc->next_duplicate = NULL;
518 rdesc->refcount = 1;
519 jfunc->value.constant.rdesc = rdesc;
520 }
521 else
522 jfunc->value.constant.rdesc = NULL;
523 }
524
525 /* Set JFUNC to be a simple pass-through jump function. */
526 static void
527 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
528 bool agg_preserved)
529 {
530 jfunc->type = IPA_JF_PASS_THROUGH;
531 jfunc->value.pass_through.operand = NULL_TREE;
532 jfunc->value.pass_through.formal_id = formal_id;
533 jfunc->value.pass_through.operation = NOP_EXPR;
534 jfunc->value.pass_through.agg_preserved = agg_preserved;
535 }
536
537 /* Set JFUNC to be an arithmetic pass through jump function. */
538
539 static void
540 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
541 tree operand, enum tree_code operation)
542 {
543 jfunc->type = IPA_JF_PASS_THROUGH;
544 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
545 jfunc->value.pass_through.formal_id = formal_id;
546 jfunc->value.pass_through.operation = operation;
547 jfunc->value.pass_through.agg_preserved = false;
548 }
549
550 /* Set JFUNC to be an ancestor jump function. */
551
552 static void
553 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
554 int formal_id, bool agg_preserved)
555 {
556 jfunc->type = IPA_JF_ANCESTOR;
557 jfunc->value.ancestor.formal_id = formal_id;
558 jfunc->value.ancestor.offset = offset;
559 jfunc->value.ancestor.agg_preserved = agg_preserved;
560 }
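/* For illustration of the setters above: a simple pass-through jump function
   with FORMAL_ID 1 says the actual argument is the caller's parameter 1
   unchanged; an arithmetic pass-through with operation PLUS_EXPR and operand
   4 describes an argument computed as that parameter plus 4; and an ancestor
   jump function with OFFSET 64 describes an argument that is a pointer
   64 bits (8 bytes) into the object pointed to by the given formal
   parameter.  */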
561
 562 /* Get IPA BB information about the given BB.  FBI is the context of analysis
563 of this function body. */
564
565 static struct ipa_bb_info *
566 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
567 {
568 gcc_checking_assert (fbi);
569 return &fbi->bb_infos[bb->index];
570 }
571
572 /* Structure to be passed in between detect_type_change and
573 check_stmt_for_type_change. */
574
575 struct prop_type_change_info
576 {
577 /* Offset into the object where there is the virtual method pointer we are
578 looking for. */
579 HOST_WIDE_INT offset;
580 /* The declaration or SSA_NAME pointer of the base that we are checking for
581 type change. */
582 tree object;
583 /* Set to true if dynamic type change has been detected. */
584 bool type_maybe_changed;
585 };
586
587 /* Return true if STMT can modify a virtual method table pointer.
588
589 This function makes special assumptions about both constructors and
590 destructors which are all the functions that are allowed to alter the VMT
591 pointers. It assumes that destructors begin with assignment into all VMT
592 pointers and that constructors essentially look in the following way:
593
594 1) The very first thing they do is that they call constructors of ancestor
595 sub-objects that have them.
596
 597    2) Then VMT pointers of this and all its ancestors are set to new values
 598    corresponding to the type of the constructor.
599
600 3) Only afterwards, other stuff such as constructor of member sub-objects
601 and the code written by the user is run. Only this may include calling
602 virtual functions, directly or indirectly.
603
604 There is no way to call a constructor of an ancestor sub-object in any
605 other way.
606
607 This means that we do not have to care whether constructors get the correct
608 type information because they will always change it (in fact, if we define
609 the type to be given by the VMT pointer, it is undefined).
610
611 The most important fact to derive from the above is that if, for some
 612    statement in section 3, we try to detect whether the dynamic type has
613 changed, we can safely ignore all calls as we examine the function body
614 backwards until we reach statements in section 2 because these calls cannot
615 be ancestor constructors or destructors (if the input is not bogus) and so
616 do not change the dynamic type (this holds true only for automatically
617 allocated objects but at the moment we devirtualize only these). We then
618 must detect that statements in section 2 change the dynamic type and can try
619 to derive the new type. That is enough and we can stop, we will never see
620 the calls into constructors of sub-objects in this code. Therefore we can
621 safely ignore all call statements that we traverse.
622 */
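/* For illustration only (a sketch, not code emitted by any particular front
   end), the constructor structure assumed above is roughly:

       Derived::Derived (struct Derived *this)
       {
         Base::Base (this);                  // 1) ancestor constructors first
         this->_vptr = &vtable_for_Derived;  // 2) VMT pointers set afterwards
         user_code (this);                   // 3) only then user code runs
       }
*/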
623
624 static bool
625 stmt_may_be_vtbl_ptr_store (gimple stmt)
626 {
627 if (is_gimple_call (stmt))
628 return false;
629 if (gimple_clobber_p (stmt))
630 return false;
631 else if (is_gimple_assign (stmt))
632 {
633 tree lhs = gimple_assign_lhs (stmt);
634
635 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
636 {
637 if (flag_strict_aliasing
638 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
639 return false;
640
641 if (TREE_CODE (lhs) == COMPONENT_REF
642 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
643 return false;
644 /* In the future we might want to use get_base_ref_and_offset to find
645 if there is a field corresponding to the offset and if so, proceed
646 almost like if it was a component ref. */
647 }
648 }
649 return true;
650 }
651
652 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
653 to check whether a particular statement may modify the virtual table
 654    pointer.  It stores its result into DATA, which points to a
655 prop_type_change_info structure. */
656
657 static bool
658 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
659 {
660 gimple stmt = SSA_NAME_DEF_STMT (vdef);
661 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
662
663 if (stmt_may_be_vtbl_ptr_store (stmt))
664 {
665 tci->type_maybe_changed = true;
666 return true;
667 }
668 else
669 return false;
670 }
671
 672 /* See if ARG is a PARAM_DECL describing an instance passed by pointer
 673    or reference in FUNCTION.  Return true if the dynamic type may change
 674    between the beginning of the function and the invocation of CALL, and
 675    false if it cannot.
 676 
 677    Generally functions are not allowed to change the type of such instances,
 678    but they can call destructors.  We assume that methods cannot destroy the
 679    THIS pointer; constructors and destructors, however, may change its type.  */
680
681 static bool
682 param_type_may_change_p (tree function, tree arg, gimple call)
683 {
 684   /* Pure functions cannot change the dynamic type;
 685      that requires writing to memory.  */
686 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
687 return false;
 688   /* We need to check if we are within an inlined constructor
 689      or destructor (ideally we would have a way to check that the
 690      inlined cdtor is actually working on ARG, but we don't have an
 691      easy way to tell, so we punt on all non-pure cdtors).
 692      We may also record the types of cdtors and, once we know the type
 693      of the instance, match them.
694
695 Also code unification optimizations may merge calls from
696 different blocks making return values unreliable. So
697 do nothing during late optimization. */
698 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
699 return true;
700 if (TREE_CODE (arg) == SSA_NAME
701 && SSA_NAME_IS_DEFAULT_DEF (arg)
702 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
703 {
704 /* Normal (non-THIS) argument. */
705 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
706 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
 707 	  /* THIS pointer of a method - here we want to watch constructors
708 and destructors as those definitely may change the dynamic
709 type. */
710 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
711 && !DECL_CXX_CONSTRUCTOR_P (function)
712 && !DECL_CXX_DESTRUCTOR_P (function)
713 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
714 {
715 /* Walk the inline stack and watch out for ctors/dtors. */
716 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
717 block = BLOCK_SUPERCONTEXT (block))
718 if (BLOCK_ABSTRACT_ORIGIN (block)
719 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
720 {
721 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
722
723 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
724 continue;
725 if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
726 && (DECL_CXX_CONSTRUCTOR_P (fn)
727 || DECL_CXX_DESTRUCTOR_P (fn)))
728 return true;
729 }
730 return false;
731 }
732 }
733 return true;
734 }
735
736 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
 737    callsite CALL) by looking for assignments to its virtual table pointer.  If
 738    it has, return true and fill in the jump function JFUNC with relevant type
739 information or set it to unknown. ARG is the object itself (not a pointer
740 to it, unless dereferenced). BASE is the base of the memory access as
741 returned by get_ref_base_and_extent, as is the offset.
742
 743    This is a helper function for detect_type_change and detect_type_change_ssa
 744    that does the heavy work which is usually unnecessary.  */
745
746 static bool
747 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
748 gcall *call, struct ipa_jump_func *jfunc,
749 HOST_WIDE_INT offset)
750 {
751 struct prop_type_change_info tci;
752 ao_ref ao;
753 bool entry_reached = false;
754
755 gcc_checking_assert (DECL_P (arg)
756 || TREE_CODE (arg) == MEM_REF
757 || handled_component_p (arg));
758
759 comp_type = TYPE_MAIN_VARIANT (comp_type);
760
761 /* Const calls cannot call virtual methods through VMT and so type changes do
762 not matter. */
763 if (!flag_devirtualize || !gimple_vuse (call)
764 /* Be sure expected_type is polymorphic. */
765 || !comp_type
766 || TREE_CODE (comp_type) != RECORD_TYPE
767 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
768 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
769 return true;
770
771 ao_ref_init (&ao, arg);
772 ao.base = base;
773 ao.offset = offset;
774 ao.size = POINTER_SIZE;
775 ao.max_size = ao.size;
776
777 tci.offset = offset;
778 tci.object = get_base_address (arg);
779 tci.type_maybe_changed = false;
780
781 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
782 &tci, NULL, &entry_reached);
783 if (!tci.type_maybe_changed)
784 return false;
785
786 ipa_set_jf_unknown (jfunc);
787 return true;
788 }
789
790 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
 791    If so, return true and fill in the jump function JFUNC with relevant type
792 information or set it to unknown. ARG is the object itself (not a pointer
793 to it, unless dereferenced). BASE is the base of the memory access as
794 returned by get_ref_base_and_extent, as is the offset. */
795
796 static bool
797 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
798 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
799 {
800 if (!flag_devirtualize)
801 return false;
802
803 if (TREE_CODE (base) == MEM_REF
804 && !param_type_may_change_p (current_function_decl,
805 TREE_OPERAND (base, 0),
806 call))
807 return false;
808 return detect_type_change_from_memory_writes (arg, base, comp_type,
809 call, jfunc, offset);
810 }
811
812 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
813 SSA name (its dereference will become the base and the offset is assumed to
814 be zero). */
815
816 static bool
817 detect_type_change_ssa (tree arg, tree comp_type,
818 gcall *call, struct ipa_jump_func *jfunc)
819 {
820 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
821 if (!flag_devirtualize
822 || !POINTER_TYPE_P (TREE_TYPE (arg)))
823 return false;
824
825 if (!param_type_may_change_p (current_function_decl, arg, call))
826 return false;
827
828 arg = build2 (MEM_REF, ptr_type_node, arg,
829 build_int_cst (ptr_type_node, 0));
830
831 return detect_type_change_from_memory_writes (arg, arg, comp_type,
832 call, jfunc, 0);
833 }
834
 835 /* Callback of walk_aliased_vdefs.  Flags that it has been invoked by setting
 836    the boolean variable pointed to by DATA to true.  */
837
838 static bool
839 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
840 void *data)
841 {
842 bool *b = (bool *) data;
843 *b = true;
844 return true;
845 }
846
847 /* Return true if we have already walked so many statements in AA that we
848 should really just start giving up. */
849
850 static bool
851 aa_overwalked (struct func_body_info *fbi)
852 {
853 gcc_checking_assert (fbi);
854 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
855 }
856
857 /* Find the nearest valid aa status for parameter specified by INDEX that
858 dominates BB. */
859
860 static struct param_aa_status *
861 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
862 int index)
863 {
864 while (true)
865 {
866 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
867 if (!bb)
868 return NULL;
869 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
870 if (!bi->param_aa_statuses.is_empty ()
871 && bi->param_aa_statuses[index].valid)
872 return &bi->param_aa_statuses[index];
873 }
874 }
875
876 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
 877    structures and/or initialize the result with a dominating description as
878 necessary. */
879
880 static struct param_aa_status *
881 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
882 int index)
883 {
884 gcc_checking_assert (fbi);
885 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
886 if (bi->param_aa_statuses.is_empty ())
887 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
888 struct param_aa_status *paa = &bi->param_aa_statuses[index];
889 if (!paa->valid)
890 {
891 gcc_checking_assert (!paa->parm_modified
892 && !paa->ref_modified
893 && !paa->pt_modified);
894 struct param_aa_status *dom_paa;
895 dom_paa = find_dominating_aa_status (fbi, bb, index);
896 if (dom_paa)
897 *paa = *dom_paa;
898 else
899 paa->valid = true;
900 }
901
902 return paa;
903 }
904
 905 /* Return true if a load from a formal parameter PARM_LOAD is known to
 906    retrieve a value that is not modified in this function before reaching
 907    the statement STMT.  FBI holds the information we have gathered about the
 908    function so far, which does not survive the summary building stage.  */
909
910 static bool
911 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
912 gimple stmt, tree parm_load)
913 {
914 struct param_aa_status *paa;
915 bool modified = false;
916 ao_ref refd;
917
918 /* FIXME: FBI can be NULL if we are being called from outside
919 ipa_node_analysis or ipcp_transform_function, which currently happens
920 during inlining analysis. It would be great to extend fbi's lifetime and
921 always have it. Currently, we are just not afraid of too much walking in
922 that case. */
923 if (fbi)
924 {
925 if (aa_overwalked (fbi))
926 return false;
927 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
928 if (paa->parm_modified)
929 return false;
930 }
931 else
932 paa = NULL;
933
934 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
935 ao_ref_init (&refd, parm_load);
936 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
937 &modified, NULL);
938 if (fbi)
939 fbi->aa_walked += walked;
940 if (paa && modified)
941 paa->parm_modified = true;
942 return !modified;
943 }
944
 945 /* If STMT is an assignment that loads a value from a parameter declaration
 946    which has not been modified, return the index of that parameter in
 947    ipa_node_params.  Otherwise return -1.  */
948
949 static int
950 load_from_unmodified_param (struct func_body_info *fbi,
951 vec<ipa_param_descriptor> descriptors,
952 gimple stmt)
953 {
954 int index;
955 tree op1;
956
957 if (!gimple_assign_single_p (stmt))
958 return -1;
959
960 op1 = gimple_assign_rhs1 (stmt);
961 if (TREE_CODE (op1) != PARM_DECL)
962 return -1;
963
964 index = ipa_get_param_decl_index_1 (descriptors, op1);
965 if (index < 0
966 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
967 return -1;
968
969 return index;
970 }
971
972 /* Return true if memory reference REF (which must be a load through parameter
973 with INDEX) loads data that are known to be unmodified in this function
974 before reaching statement STMT. */
975
976 static bool
977 parm_ref_data_preserved_p (struct func_body_info *fbi,
978 int index, gimple stmt, tree ref)
979 {
980 struct param_aa_status *paa;
981 bool modified = false;
982 ao_ref refd;
983
984 /* FIXME: FBI can be NULL if we are being called from outside
985 ipa_node_analysis or ipcp_transform_function, which currently happens
986 during inlining analysis. It would be great to extend fbi's lifetime and
987 always have it. Currently, we are just not afraid of too much walking in
988 that case. */
989 if (fbi)
990 {
991 if (aa_overwalked (fbi))
992 return false;
993 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
994 if (paa->ref_modified)
995 return false;
996 }
997 else
998 paa = NULL;
999
1000 gcc_checking_assert (gimple_vuse (stmt));
1001 ao_ref_init (&refd, ref);
1002 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1003 &modified, NULL);
1004 if (fbi)
1005 fbi->aa_walked += walked;
1006 if (paa && modified)
1007 paa->ref_modified = true;
1008 return !modified;
1009 }
1010
1011 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1012 is known to be unmodified in this function before reaching call statement
1013 CALL into which it is passed. FBI describes the function body. */
1014
1015 static bool
1016 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
1017 gimple call, tree parm)
1018 {
1019 bool modified = false;
1020 ao_ref refd;
1021
 1022   /* It's unnecessary to calculate anything about memory contents for a const
 1023      function because it is not going to use it.  But do not cache the result
1024 either. Also, no such calculations for non-pointers. */
1025 if (!gimple_vuse (call)
1026 || !POINTER_TYPE_P (TREE_TYPE (parm))
1027 || aa_overwalked (fbi))
1028 return false;
1029
1030 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1031 index);
1032 if (paa->pt_modified)
1033 return false;
1034
1035 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1036 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1037 &modified, NULL);
1038 fbi->aa_walked += walked;
1039 if (modified)
1040 paa->pt_modified = true;
1041 return !modified;
1042 }
1043
1044 /* Return true if we can prove that OP is a memory reference loading unmodified
1045 data from an aggregate passed as a parameter and if the aggregate is passed
1046 by reference, that the alias type of the load corresponds to the type of the
1047 formal parameter (so that we can rely on this type for TBAA in callers).
 1048    FBI and DESCRIPTORS describe parameters of the current function (but FBI
 1049    can be NULL), STMT is the load statement.  If the function returns true,
 1050    *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
1051 within the aggregate and whether it is a load from a value passed by
1052 reference respectively. */
1053
1054 static bool
1055 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1056 vec<ipa_param_descriptor> descriptors,
1057 gimple stmt, tree op, int *index_p,
1058 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1059 bool *by_ref_p)
1060 {
1061 int index;
1062 HOST_WIDE_INT size, max_size;
1063 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1064
1065 if (max_size == -1 || max_size != size || *offset_p < 0)
1066 return false;
1067
1068 if (DECL_P (base))
1069 {
1070 int index = ipa_get_param_decl_index_1 (descriptors, base);
1071 if (index >= 0
1072 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1073 {
1074 *index_p = index;
1075 *by_ref_p = false;
1076 if (size_p)
1077 *size_p = size;
1078 return true;
1079 }
1080 return false;
1081 }
1082
1083 if (TREE_CODE (base) != MEM_REF
1084 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1085 || !integer_zerop (TREE_OPERAND (base, 1)))
1086 return false;
1087
1088 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1089 {
1090 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1091 index = ipa_get_param_decl_index_1 (descriptors, parm);
1092 }
1093 else
1094 {
1095 /* This branch catches situations where a pointer parameter is not a
1096 gimple register, for example:
1097
1098 void hip7(S*) (struct S * p)
1099 {
1100 void (*<T2e4>) (struct S *) D.1867;
1101 struct S * p.1;
1102
1103 <bb 2>:
1104 p.1_1 = p;
1105 D.1867_2 = p.1_1->f;
1106 D.1867_2 ();
1107 gdp = &p;
1108 */
1109
1110 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1111 index = load_from_unmodified_param (fbi, descriptors, def);
1112 }
1113
1114 if (index >= 0
1115 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1116 {
1117 *index_p = index;
1118 *by_ref_p = true;
1119 if (size_p)
1120 *size_p = size;
1121 return true;
1122 }
1123 return false;
1124 }
1125
 1126 /* Just like the previous function, only without the func_body_info
 1127    pointer and the size output, for users outside of this file.  */
1128
1129 bool
1130 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1131 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1132 bool *by_ref_p)
1133 {
1134 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1135 offset_p, NULL, by_ref_p);
1136 }
1137
1138 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1139 of an assignment statement STMT, try to determine whether we are actually
1140 handling any of the following cases and construct an appropriate jump
1141 function into JFUNC if so:
1142
1143 1) The passed value is loaded from a formal parameter which is not a gimple
1144 register (most probably because it is addressable, the value has to be
1145 scalar) and we can guarantee the value has not changed. This case can
1146 therefore be described by a simple pass-through jump function. For example:
1147
1148 foo (int a)
1149 {
1150 int a.0;
1151
1152 a.0_2 = a;
1153 bar (a.0_2);
1154
1155 2) The passed value can be described by a simple arithmetic pass-through
1156 jump function. E.g.
1157
1158 foo (int a)
1159 {
1160 int D.2064;
1161
1162 D.2064_4 = a.1(D) + 4;
1163 bar (D.2064_4);
1164
1165 This case can also occur in combination of the previous one, e.g.:
1166
1167 foo (int a, int z)
1168 {
1169 int a.0;
1170 int D.2064;
1171
1172 a.0_3 = a;
1173 D.2064_4 = a.0_3 + 4;
1174 foo (D.2064_4);
1175
1176 3) The passed value is an address of an object within another one (which
 1177    is also passed by reference).  Such situations are described by an ancestor
1178 jump function and describe situations such as:
1179
1180 B::foo() (struct B * const this)
1181 {
1182 struct A * D.1845;
1183
1184 D.1845_2 = &this_1(D)->D.1748;
1185 A::bar (D.1845_2);
1186
 1187    INFO is the structure describing individual parameters across different
 1188    stages of IPA optimizations.  FBI contains the per-function-body information
 1189    that is only needed for intraprocedural analysis.  */
1190
1191 static void
1192 compute_complex_assign_jump_func (struct func_body_info *fbi,
1193 struct ipa_node_params *info,
1194 struct ipa_jump_func *jfunc,
1195 gcall *call, gimple stmt, tree name,
1196 tree param_type)
1197 {
1198 HOST_WIDE_INT offset, size, max_size;
1199 tree op1, tc_ssa, base, ssa;
1200 int index;
1201
1202 op1 = gimple_assign_rhs1 (stmt);
1203
1204 if (TREE_CODE (op1) == SSA_NAME)
1205 {
1206 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1207 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1208 else
1209 index = load_from_unmodified_param (fbi, info->descriptors,
1210 SSA_NAME_DEF_STMT (op1));
1211 tc_ssa = op1;
1212 }
1213 else
1214 {
1215 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1216 tc_ssa = gimple_assign_lhs (stmt);
1217 }
1218
1219 if (index >= 0)
1220 {
1221 tree op2 = gimple_assign_rhs2 (stmt);
1222
1223 if (op2)
1224 {
1225 if (!is_gimple_ip_invariant (op2)
1226 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1227 && !useless_type_conversion_p (TREE_TYPE (name),
1228 TREE_TYPE (op1))))
1229 return;
1230
1231 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1232 gimple_assign_rhs_code (stmt));
1233 }
1234 else if (gimple_assign_single_p (stmt))
1235 {
1236 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1237 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1238 }
1239 return;
1240 }
1241
1242 if (TREE_CODE (op1) != ADDR_EXPR)
1243 return;
1244 op1 = TREE_OPERAND (op1, 0);
1245 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1246 return;
1247 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1248 if (TREE_CODE (base) != MEM_REF
1249 /* If this is a varying address, punt. */
1250 || max_size == -1
1251 || max_size != size)
1252 return;
1253 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1254 ssa = TREE_OPERAND (base, 0);
1255 if (TREE_CODE (ssa) != SSA_NAME
1256 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1257 || offset < 0)
1258 return;
1259
1260 /* Dynamic types are changed in constructors and destructors. */
1261 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1262 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1263 ipa_set_ancestor_jf (jfunc, offset, index,
1264 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1265 }
1266
1267 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1268 it looks like:
1269
1270 iftmp.1_3 = &obj_2(D)->D.1762;
1271
1272 The base of the MEM_REF must be a default definition SSA NAME of a
 1273    parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
1274 whole MEM_REF expression is returned and the offset calculated from any
1275 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1276 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1277
1278 static tree
1279 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1280 {
1281 HOST_WIDE_INT size, max_size;
1282 tree expr, parm, obj;
1283
1284 if (!gimple_assign_single_p (assign))
1285 return NULL_TREE;
1286 expr = gimple_assign_rhs1 (assign);
1287
1288 if (TREE_CODE (expr) != ADDR_EXPR)
1289 return NULL_TREE;
1290 expr = TREE_OPERAND (expr, 0);
1291 obj = expr;
1292 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1293
1294 if (TREE_CODE (expr) != MEM_REF
1295 /* If this is a varying address, punt. */
1296 || max_size == -1
1297 || max_size != size
1298 || *offset < 0)
1299 return NULL_TREE;
1300 parm = TREE_OPERAND (expr, 0);
1301 if (TREE_CODE (parm) != SSA_NAME
1302 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1303 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1304 return NULL_TREE;
1305
1306 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1307 *obj_p = obj;
1308 return expr;
1309 }
1310
1311
1312 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1313 statement PHI, try to find out whether NAME is in fact a
1314 multiple-inheritance typecast from a descendant into an ancestor of a formal
1315 parameter and thus can be described by an ancestor jump function and if so,
1316 write the appropriate function into JFUNC.
1317
1318 Essentially we want to match the following pattern:
1319
1320 if (obj_2(D) != 0B)
1321 goto <bb 3>;
1322 else
1323 goto <bb 4>;
1324
1325 <bb 3>:
1326 iftmp.1_3 = &obj_2(D)->D.1762;
1327
1328 <bb 4>:
1329 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1330 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1331 return D.1879_6; */
1332
1333 static void
1334 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1335 struct ipa_node_params *info,
1336 struct ipa_jump_func *jfunc,
1337 gcall *call, gphi *phi)
1338 {
1339 HOST_WIDE_INT offset;
1340 gimple assign, cond;
1341 basic_block phi_bb, assign_bb, cond_bb;
1342 tree tmp, parm, expr, obj;
1343 int index, i;
1344
1345 if (gimple_phi_num_args (phi) != 2)
1346 return;
1347
1348 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1349 tmp = PHI_ARG_DEF (phi, 0);
1350 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1351 tmp = PHI_ARG_DEF (phi, 1);
1352 else
1353 return;
1354 if (TREE_CODE (tmp) != SSA_NAME
1355 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1356 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1357 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1358 return;
1359
1360 assign = SSA_NAME_DEF_STMT (tmp);
1361 assign_bb = gimple_bb (assign);
1362 if (!single_pred_p (assign_bb))
1363 return;
1364 expr = get_ancestor_addr_info (assign, &obj, &offset);
1365 if (!expr)
1366 return;
1367 parm = TREE_OPERAND (expr, 0);
1368 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1369 if (index < 0)
1370 return;
1371
1372 cond_bb = single_pred (assign_bb);
1373 cond = last_stmt (cond_bb);
1374 if (!cond
1375 || gimple_code (cond) != GIMPLE_COND
1376 || gimple_cond_code (cond) != NE_EXPR
1377 || gimple_cond_lhs (cond) != parm
1378 || !integer_zerop (gimple_cond_rhs (cond)))
1379 return;
1380
1381 phi_bb = gimple_bb (phi);
1382 for (i = 0; i < 2; i++)
1383 {
1384 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1385 if (pred != assign_bb && pred != cond_bb)
1386 return;
1387 }
1388
1389 ipa_set_ancestor_jf (jfunc, offset, index,
1390 parm_ref_data_pass_through_p (fbi, index, call, parm));
1391 }
1392
1393 /* Inspect the given TYPE and return true iff it has the same structure (the
1394 same number of fields of the same types) as a C++ member pointer. If
1395 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1396 corresponding fields there. */
1397
1398 static bool
1399 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1400 {
1401 tree fld;
1402
1403 if (TREE_CODE (type) != RECORD_TYPE)
1404 return false;
1405
1406 fld = TYPE_FIELDS (type);
1407 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1408 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1409 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1410 return false;
1411
1412 if (method_ptr)
1413 *method_ptr = fld;
1414
1415 fld = DECL_CHAIN (fld);
1416 if (!fld || INTEGRAL_TYPE_P (fld)
1417 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1418 return false;
1419 if (delta)
1420 *delta = fld;
1421
1422 if (DECL_CHAIN (fld))
1423 return false;
1424
1425 return true;
1426 }
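/* For illustration only (field names are made up), the record layout that
   type_like_member_ptr_p accepts is essentially the usual two-field
   method-pointer pair:

       struct like_member_ptr
       {
         void (*pfn) (void *);   /* pointer to the target method */
         long delta;             /* adjustment applied to the this pointer */
       };
*/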
1427
1428 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1429 return the rhs of its defining statement. Otherwise return RHS as it
1430 is. */
1431
1432 static inline tree
1433 get_ssa_def_if_simple_copy (tree rhs)
1434 {
1435 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1436 {
1437 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1438
1439 if (gimple_assign_single_p (def_stmt))
1440 rhs = gimple_assign_rhs1 (def_stmt);
1441 else
1442 break;
1443 }
1444 return rhs;
1445 }
1446
 1447 /* Simple linked list, describing known contents of an aggregate before a
 1448    call.  */
1449
1450 struct ipa_known_agg_contents_list
1451 {
1452 /* Offset and size of the described part of the aggregate. */
1453 HOST_WIDE_INT offset, size;
 1454   /* Known constant value or NULL if the contents are known to be unknown.  */
1455 tree constant;
1456 /* Pointer to the next structure in the list. */
1457 struct ipa_known_agg_contents_list *next;
1458 };
1459
1460 /* Find the proper place in linked list of ipa_known_agg_contents_list
1461 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1462 unless there is a partial overlap, in which case return NULL, or such
1463 element is already there, in which case set *ALREADY_THERE to true. */
1464
1465 static struct ipa_known_agg_contents_list **
1466 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1467 HOST_WIDE_INT lhs_offset,
1468 HOST_WIDE_INT lhs_size,
1469 bool *already_there)
1470 {
1471 struct ipa_known_agg_contents_list **p = list;
1472 while (*p && (*p)->offset < lhs_offset)
1473 {
1474 if ((*p)->offset + (*p)->size > lhs_offset)
1475 return NULL;
1476 p = &(*p)->next;
1477 }
1478
1479 if (*p && (*p)->offset < lhs_offset + lhs_size)
1480 {
1481 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1482 /* We already know this value is subsequently overwritten with
1483 something else. */
1484 *already_there = true;
1485 else
1486 /* Otherwise this is a partial overlap which we cannot
1487 represent. */
1488 return NULL;
1489 }
1490 return p;
1491 }
1492
1493 /* Build aggregate jump function from LIST, assuming there are exactly
 1494    CONST_COUNT constant entries there and that the offset of the passed argument
1495 is ARG_OFFSET and store it into JFUNC. */
1496
1497 static void
1498 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1499 int const_count, HOST_WIDE_INT arg_offset,
1500 struct ipa_jump_func *jfunc)
1501 {
1502 vec_alloc (jfunc->agg.items, const_count);
1503 while (list)
1504 {
1505 if (list->constant)
1506 {
1507 struct ipa_agg_jf_item item;
1508 item.offset = list->offset - arg_offset;
1509 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1510 item.value = unshare_expr_without_location (list->constant);
1511 jfunc->agg.items->quick_push (item);
1512 }
1513 list = list->next;
1514 }
1515 }
1516
1517 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1518 in ARG is filled in with constant values. ARG can either be an aggregate
1519 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1520 aggregate. JFUNC is the jump function into which the constants are
1521 subsequently stored. */
1522
1523 static void
1524 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1525 tree arg_type,
1526 struct ipa_jump_func *jfunc)
1527 {
1528 struct ipa_known_agg_contents_list *list = NULL;
1529 int item_count = 0, const_count = 0;
1530 HOST_WIDE_INT arg_offset, arg_size;
1531 gimple_stmt_iterator gsi;
1532 tree arg_base;
1533 bool check_ref, by_ref;
1534 ao_ref r;
1535
1536 /* The function operates in three stages. First, we prepare check_ref, r,
1537 arg_base and arg_offset based on what is actually passed as an actual
1538 argument. */
1539
1540 if (POINTER_TYPE_P (arg_type))
1541 {
1542 by_ref = true;
1543 if (TREE_CODE (arg) == SSA_NAME)
1544 {
1545 tree type_size;
1546 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1547 return;
1548 check_ref = true;
1549 arg_base = arg;
1550 arg_offset = 0;
1551 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1552 arg_size = tree_to_uhwi (type_size);
1553 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1554 }
1555 else if (TREE_CODE (arg) == ADDR_EXPR)
1556 {
1557 HOST_WIDE_INT arg_max_size;
1558
1559 arg = TREE_OPERAND (arg, 0);
1560 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1561 &arg_max_size);
1562 if (arg_max_size == -1
1563 || arg_max_size != arg_size
1564 || arg_offset < 0)
1565 return;
1566 if (DECL_P (arg_base))
1567 {
1568 check_ref = false;
1569 ao_ref_init (&r, arg_base);
1570 }
1571 else
1572 return;
1573 }
1574 else
1575 return;
1576 }
1577 else
1578 {
1579 HOST_WIDE_INT arg_max_size;
1580
1581 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1582
1583 by_ref = false;
1584 check_ref = false;
1585 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1586 &arg_max_size);
1587 if (arg_max_size == -1
1588 || arg_max_size != arg_size
1589 || arg_offset < 0)
1590 return;
1591
1592 ao_ref_init (&r, arg);
1593 }
1594
1595 /* Second stage walks back the BB, looks at individual statements and as long
1596 as it is confident of how the statements affect contents of the
 1597      aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1598 describing it. */
1599 gsi = gsi_for_stmt (call);
1600 gsi_prev (&gsi);
1601 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1602 {
1603 struct ipa_known_agg_contents_list *n, **p;
1604 gimple stmt = gsi_stmt (gsi);
1605 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1606 tree lhs, rhs, lhs_base;
1607
1608 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1609 continue;
1610 if (!gimple_assign_single_p (stmt))
1611 break;
1612
1613 lhs = gimple_assign_lhs (stmt);
1614 rhs = gimple_assign_rhs1 (stmt);
1615 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1616 || TREE_CODE (lhs) == BIT_FIELD_REF
1617 || contains_bitfld_component_ref_p (lhs))
1618 break;
1619
1620 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1621 &lhs_max_size);
1622 if (lhs_max_size == -1
1623 || lhs_max_size != lhs_size)
1624 break;
1625
1626 if (check_ref)
1627 {
1628 if (TREE_CODE (lhs_base) != MEM_REF
1629 || TREE_OPERAND (lhs_base, 0) != arg_base
1630 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1631 break;
1632 }
1633 else if (lhs_base != arg_base)
1634 {
1635 if (DECL_P (lhs_base))
1636 continue;
1637 else
1638 break;
1639 }
1640
1641 bool already_there = false;
1642 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1643 &already_there);
1644 if (!p)
1645 break;
1646 if (already_there)
1647 continue;
1648
1649 rhs = get_ssa_def_if_simple_copy (rhs);
1650 n = XALLOCA (struct ipa_known_agg_contents_list);
1651 n->size = lhs_size;
1652 n->offset = lhs_offset;
1653 if (is_gimple_ip_invariant (rhs))
1654 {
1655 n->constant = rhs;
1656 const_count++;
1657 }
1658 else
1659 n->constant = NULL_TREE;
1660 n->next = *p;
1661 *p = n;
1662
1663 item_count++;
1664 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1665 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1666 break;
1667 }
1668
1669 /* Third stage just goes over the list and creates an appropriate vector of
 1670      ipa_agg_jf_item structures out of it, of course only if there are
1671 any known constants to begin with. */
1672
1673 if (const_count)
1674 {
1675 jfunc->agg.by_ref = by_ref;
1676 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1677 }
1678 }
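/* For illustration only (a sketch): the pattern recognized above is a call
   preceded by constant stores into the aggregate it receives, e.g.

       struct S s;
       s.a = 1;
       s.b = 2;
       foo (&s);

   which yields one constant ipa_agg_jf_item entry per field in the jump
   function, with by_ref set according to how the aggregate is passed.  */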
1679
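/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, taken from the callee's declared type (or from the call's fntype
   when the callee is unknown) and falling back to the callee's
   DECL_ARGUMENTS.  Return NULL if the type cannot be determined, for example
   for variadic arguments beyond the declared ones.  */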
1680 static tree
1681 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1682 {
1683 int n;
1684 tree type = (e->callee
1685 ? TREE_TYPE (e->callee->decl)
1686 : gimple_call_fntype (e->call_stmt));
1687 tree t = TYPE_ARG_TYPES (type);
1688
1689 for (n = 0; n < i; n++)
1690 {
1691 if (!t)
1692 break;
1693 t = TREE_CHAIN (t);
1694 }
1695 if (t)
1696 return TREE_VALUE (t);
1697 if (!e->callee)
1698 return NULL;
1699 t = DECL_ARGUMENTS (e->callee->decl);
1700 for (n = 0; n < i; n++)
1701 {
1702 if (!t)
1703 return NULL;
1704 t = TREE_CHAIN (t);
1705 }
1706 if (t)
1707 return TREE_TYPE (t);
1708 return NULL;
1709 }
1710
1711 /* Compute jump function for all arguments of callsite CS and insert the
1712 information in the jump_functions array in the ipa_edge_args corresponding
1713 to this callsite. */
1714
1715 static void
1716 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1717 struct cgraph_edge *cs)
1718 {
1719 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1720 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1721 gcall *call = cs->call_stmt;
1722 int n, arg_num = gimple_call_num_args (call);
1723 bool useful_context = false;
1724
1725 if (arg_num == 0 || args->jump_functions)
1726 return;
1727 vec_safe_grow_cleared (args->jump_functions, arg_num);
1728 if (flag_devirtualize)
1729 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1730
1731 if (gimple_call_internal_p (call))
1732 return;
1733 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1734 return;
1735
1736 for (n = 0; n < arg_num; n++)
1737 {
1738 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1739 tree arg = gimple_call_arg (call, n);
1740 tree param_type = ipa_get_callee_param_type (cs, n);
1741 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1742 {
1743 tree instance;
1744 struct ipa_polymorphic_call_context context (cs->caller->decl,
1745 arg, cs->call_stmt,
1746 &instance);
1747 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1748 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1749 if (!context.useless_p ())
1750 useful_context = true;
1751 }
1752
 1753       if (POINTER_TYPE_P (TREE_TYPE (arg)))
1754 {
1755 unsigned HOST_WIDE_INT hwi_bitpos;
1756 unsigned align;
1757
1758 if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
1759 && align % BITS_PER_UNIT == 0
1760 && hwi_bitpos % BITS_PER_UNIT == 0)
1761 {
1762 jfunc->alignment.known = true;
1763 jfunc->alignment.align = align / BITS_PER_UNIT;
1764 jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
1765 }
1766 else
1767 gcc_assert (!jfunc->alignment.known);
1768 }
1769 else
1770 gcc_assert (!jfunc->alignment.known);
1771
1772 if (is_gimple_ip_invariant (arg))
1773 ipa_set_jf_constant (jfunc, arg, cs);
1774 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1775 && TREE_CODE (arg) == PARM_DECL)
1776 {
1777 int index = ipa_get_param_decl_index (info, arg);
1778
 1779 	  gcc_assert (index >= 0);
1780 /* Aggregate passed by value, check for pass-through, otherwise we
1781 will attempt to fill in aggregate contents later in this
1782 for cycle. */
1783 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1784 {
1785 ipa_set_jf_simple_pass_through (jfunc, index, false);
1786 continue;
1787 }
1788 }
1789 else if (TREE_CODE (arg) == SSA_NAME)
1790 {
1791 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1792 {
1793 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1794 if (index >= 0)
1795 {
1796 bool agg_p;
1797 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1798 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1799 }
1800 }
1801 else
1802 {
1803 gimple stmt = SSA_NAME_DEF_STMT (arg);
1804 if (is_gimple_assign (stmt))
1805 compute_complex_assign_jump_func (fbi, info, jfunc,
1806 call, stmt, arg, param_type);
1807 else if (gimple_code (stmt) == GIMPLE_PHI)
1808 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1809 call,
1810 as_a <gphi *> (stmt));
1811 }
1812 }
1813
1814       /* If ARG is a pointer, we cannot use its type to determine the type of the
1815 	 aggregate passed (type conversions are ignored in gimple).  Usually we can
1816 	 safely get the type from the function declaration, but for K&R prototypes
1817 	 or variadic functions we can try our luck with the type of the pointer
1818 	 passed.  TODO: Since we look for actual initialization of the memory
1819 	 object, we may do better by working out the type from the stores we find.  */
1820 if (!param_type)
1821 param_type = TREE_TYPE (arg);
1822
1823 if ((jfunc->type != IPA_JF_PASS_THROUGH
1824 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1825 && (jfunc->type != IPA_JF_ANCESTOR
1826 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1827 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1828 || POINTER_TYPE_P (param_type)))
1829 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1830 }
1831 if (!useful_context)
1832 vec_free (args->polymorphic_call_contexts);
1833 }
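
/* An illustrative sketch of what the function above produces, using a
   hypothetical caller and callee that are not part of GCC:

     static int use (int x, int *p);

     int
     caller (int a, int *q)
     {
       return use (a, q) + use (7, q);
     }

   For the first call site, argument 0 gets a simple pass-through jump
   function (it is parameter A unchanged) and so does argument 1; for the
   second call site, argument 0 gets a constant jump function with value 7.
   Later passes (IPA-CP, indirect inlining) consume these descriptions
   instead of re-analyzing the caller's body.  */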
1834
1835 /* Compute jump functions for all edges - both direct and indirect - outgoing
1836 from BB. */
1837
1838 static void
1839 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1840 {
1841 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1842 int i;
1843 struct cgraph_edge *cs;
1844
1845 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1846 {
1847 struct cgraph_node *callee = cs->callee;
1848
1849 if (callee)
1850 {
1851 callee->ultimate_alias_target ();
1852 /* We do not need to bother analyzing calls to unknown functions
1853 unless they may become known during lto/whopr. */
1854 if (!callee->definition && !flag_lto)
1855 continue;
1856 }
1857 ipa_compute_jump_functions_for_edge (fbi, cs);
1858 }
1859 }
1860
1861 /* If STMT looks like a statement loading a value from a member pointer formal
1862 parameter, return that parameter and store the offset of the field to
1863 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1864 might be clobbered). If USE_DELTA, then we look for a use of the delta
1865 field rather than the pfn. */
1866
1867 static tree
1868 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1869 HOST_WIDE_INT *offset_p)
1870 {
1871 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1872
1873 if (!gimple_assign_single_p (stmt))
1874 return NULL_TREE;
1875
1876 rhs = gimple_assign_rhs1 (stmt);
1877 if (TREE_CODE (rhs) == COMPONENT_REF)
1878 {
1879 ref_field = TREE_OPERAND (rhs, 1);
1880 rhs = TREE_OPERAND (rhs, 0);
1881 }
1882 else
1883 ref_field = NULL_TREE;
1884 if (TREE_CODE (rhs) != MEM_REF)
1885 return NULL_TREE;
1886 rec = TREE_OPERAND (rhs, 0);
1887 if (TREE_CODE (rec) != ADDR_EXPR)
1888 return NULL_TREE;
1889 rec = TREE_OPERAND (rec, 0);
1890 if (TREE_CODE (rec) != PARM_DECL
1891 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1892 return NULL_TREE;
1893 ref_offset = TREE_OPERAND (rhs, 1);
1894
1895 if (use_delta)
1896 fld = delta_field;
1897 else
1898 fld = ptr_field;
1899 if (offset_p)
1900 *offset_p = int_bit_position (fld);
1901
1902 if (ref_field)
1903 {
1904 if (integer_nonzerop (ref_offset))
1905 return NULL_TREE;
1906 return ref_field == fld ? rec : NULL_TREE;
1907 }
1908 else
1909 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1910 : NULL_TREE;
1911 }
1912
1913 /* Returns true iff T is an SSA_NAME defined by a statement. */
1914
1915 static bool
1916 ipa_is_ssa_with_stmt_def (tree t)
1917 {
1918 if (TREE_CODE (t) == SSA_NAME
1919 && !SSA_NAME_IS_DEFAULT_DEF (t))
1920 return true;
1921 else
1922 return false;
1923 }
1924
1925 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1926 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1927 indirect call graph edge. */
1928
1929 static struct cgraph_edge *
1930 ipa_note_param_call (struct cgraph_node *node, int param_index,
1931 gcall *stmt)
1932 {
1933 struct cgraph_edge *cs;
1934
1935 cs = node->get_edge (stmt);
1936 cs->indirect_info->param_index = param_index;
1937 cs->indirect_info->agg_contents = 0;
1938 cs->indirect_info->member_ptr = 0;
1939 return cs;
1940 }
1941
1942 /* Analyze the CALL and examine uses of formal parameters of the caller
1943    FBI->node (described by FBI->info), using the intermediate information
1944    about each formal parameter kept in FBI.  Currently it checks
1945 whether the call calls a pointer that is a formal parameter and if so, the
1946 parameter is marked with the called flag and an indirect call graph edge
1947 describing the call is created. This is very simple for ordinary pointers
1948 represented in SSA but not-so-nice when it comes to member pointers. The
1949 ugly part of this function does nothing more than trying to match the
1950 pattern of such a call. An example of such a pattern is the gimple dump
1951 below, the call is on the last line:
1952
1953 <bb 2>:
1954 f$__delta_5 = f.__delta;
1955 f$__pfn_24 = f.__pfn;
1956
1957 or
1958 <bb 2>:
1959 f$__delta_5 = MEM[(struct *)&f];
1960 f$__pfn_24 = MEM[(struct *)&f + 4B];
1961
1962 and a few lines below:
1963
1964 <bb 5>
1965 D.2496_3 = (int) f$__pfn_24;
1966 D.2497_4 = D.2496_3 & 1;
1967 if (D.2497_4 != 0)
1968 goto <bb 3>;
1969 else
1970 goto <bb 4>;
1971
1972 <bb 6>:
1973 D.2500_7 = (unsigned int) f$__delta_5;
1974 D.2501_8 = &S + D.2500_7;
1975 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1976 D.2503_10 = *D.2502_9;
1977 D.2504_12 = f$__pfn_24 + -1;
1978 D.2505_13 = (unsigned int) D.2504_12;
1979 D.2506_14 = D.2503_10 + D.2505_13;
1980 D.2507_15 = *D.2506_14;
1981 iftmp.11_16 = (String:: *) D.2507_15;
1982
1983 <bb 7>:
1984 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1985 D.2500_19 = (unsigned int) f$__delta_5;
1986 D.2508_20 = &S + D.2500_19;
1987 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1988
1989 Such patterns are results of simple calls to a member pointer:
1990
1991 int doprinting (int (MyString::* f)(int) const)
1992 {
1993 MyString S ("somestring");
1994
1995 return (S.*f)(4);
1996 }
1997
1998 Moreover, the function also looks for called pointers loaded from aggregates
1999 passed by value or reference. */
2000
2001 static void
2002 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
2003 tree target)
2004 {
2005 struct ipa_node_params *info = fbi->info;
2006 HOST_WIDE_INT offset;
2007 bool by_ref;
2008
2009 if (SSA_NAME_IS_DEFAULT_DEF (target))
2010 {
2011 tree var = SSA_NAME_VAR (target);
2012 int index = ipa_get_param_decl_index (info, var);
2013 if (index >= 0)
2014 ipa_note_param_call (fbi->node, index, call);
2015 return;
2016 }
2017
2018 int index;
2019 gimple def = SSA_NAME_DEF_STMT (target);
2020 if (gimple_assign_single_p (def)
2021 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2022 gimple_assign_rhs1 (def), &index, &offset,
2023 NULL, &by_ref))
2024 {
2025 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2026 cs->indirect_info->offset = offset;
2027 cs->indirect_info->agg_contents = 1;
2028 cs->indirect_info->by_ref = by_ref;
2029 return;
2030 }
2031
2032 /* Now we need to try to match the complex pattern of calling a member
2033 pointer. */
2034 if (gimple_code (def) != GIMPLE_PHI
2035 || gimple_phi_num_args (def) != 2
2036 || !POINTER_TYPE_P (TREE_TYPE (target))
2037 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2038 return;
2039
2040 /* First, we need to check whether one of these is a load from a member
2041 pointer that is a parameter to this function. */
2042 tree n1 = PHI_ARG_DEF (def, 0);
2043 tree n2 = PHI_ARG_DEF (def, 1);
2044 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2045 return;
2046 gimple d1 = SSA_NAME_DEF_STMT (n1);
2047 gimple d2 = SSA_NAME_DEF_STMT (n2);
2048
2049 tree rec;
2050 basic_block bb, virt_bb;
2051 basic_block join = gimple_bb (def);
2052 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2053 {
2054 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2055 return;
2056
2057 bb = EDGE_PRED (join, 0)->src;
2058 virt_bb = gimple_bb (d2);
2059 }
2060 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2061 {
2062 bb = EDGE_PRED (join, 1)->src;
2063 virt_bb = gimple_bb (d1);
2064 }
2065 else
2066 return;
2067
2068 /* Second, we need to check that the basic blocks are laid out in the way
2069 corresponding to the pattern. */
2070
2071 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2072 || single_pred (virt_bb) != bb
2073 || single_succ (virt_bb) != join)
2074 return;
2075
2076 /* Third, let's see that the branching is done depending on the least
2077 significant bit of the pfn. */
2078
2079 gimple branch = last_stmt (bb);
2080 if (!branch || gimple_code (branch) != GIMPLE_COND)
2081 return;
2082
2083 if ((gimple_cond_code (branch) != NE_EXPR
2084 && gimple_cond_code (branch) != EQ_EXPR)
2085 || !integer_zerop (gimple_cond_rhs (branch)))
2086 return;
2087
2088 tree cond = gimple_cond_lhs (branch);
2089 if (!ipa_is_ssa_with_stmt_def (cond))
2090 return;
2091
2092 def = SSA_NAME_DEF_STMT (cond);
2093 if (!is_gimple_assign (def)
2094 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2095 || !integer_onep (gimple_assign_rhs2 (def)))
2096 return;
2097
2098 cond = gimple_assign_rhs1 (def);
2099 if (!ipa_is_ssa_with_stmt_def (cond))
2100 return;
2101
2102 def = SSA_NAME_DEF_STMT (cond);
2103
2104 if (is_gimple_assign (def)
2105 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2106 {
2107 cond = gimple_assign_rhs1 (def);
2108 if (!ipa_is_ssa_with_stmt_def (cond))
2109 return;
2110 def = SSA_NAME_DEF_STMT (cond);
2111 }
2112
2113 tree rec2;
2114 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2115 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2116 == ptrmemfunc_vbit_in_delta),
2117 NULL);
2118 if (rec != rec2)
2119 return;
2120
2121 index = ipa_get_param_decl_index (info, rec);
2122 if (index >= 0
2123 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2124 {
2125 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2126 cs->indirect_info->offset = offset;
2127 cs->indirect_info->agg_contents = 1;
2128 cs->indirect_info->member_ptr = 1;
2129 }
2130
2131 return;
2132 }
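
/* A minimal sketch of the simple SSA case handled at the beginning of the
   function above (hypothetical code, not part of GCC):

     void
     apply (void (*cb) (int), int v)
     {
       cb (v);
     }

   The called pointer CB is the default definition of a formal parameter, so
   ipa_note_param_call records an indirect edge with param_index 0, which can
   later be turned into a direct edge whenever a caller is known to pass a
   particular function.  */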
2133
2134 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2135 object referenced in the expression is a formal parameter of the caller
2136 FBI->node (described by FBI->info), create a call note for the
2137 statement. */
2138
2139 static void
2140 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2141 gcall *call, tree target)
2142 {
2143 tree obj = OBJ_TYPE_REF_OBJECT (target);
2144 int index;
2145 HOST_WIDE_INT anc_offset;
2146
2147 if (!flag_devirtualize)
2148 return;
2149
2150 if (TREE_CODE (obj) != SSA_NAME)
2151 return;
2152
2153 struct ipa_node_params *info = fbi->info;
2154 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2155 {
2156 struct ipa_jump_func jfunc;
2157 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2158 return;
2159
2160 anc_offset = 0;
2161 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2162 gcc_assert (index >= 0);
2163 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2164 call, &jfunc))
2165 return;
2166 }
2167 else
2168 {
2169 struct ipa_jump_func jfunc;
2170 gimple stmt = SSA_NAME_DEF_STMT (obj);
2171 tree expr;
2172
2173 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2174 if (!expr)
2175 return;
2176 index = ipa_get_param_decl_index (info,
2177 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2178 gcc_assert (index >= 0);
2179 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2180 call, &jfunc, anc_offset))
2181 return;
2182 }
2183
2184 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2185 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2186 ii->offset = anc_offset;
2187 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2188 ii->otr_type = obj_type_ref_class (target);
2189 ii->polymorphic = 1;
2190 }
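
/* A minimal sketch of the situation handled above, assuming a hypothetical
   class A that is not part of GCC:

     struct A { virtual int f (); };

     int
     call_f (A *a)
     {
       return a->f ();
     }

   The OBJ_TYPE_REF object is the default definition of parameter A, so an
   indirect polymorphic edge is recorded with the ancestor offset, the OTR
   token and the OTR type, ready to be devirtualized once the dynamic type of
   the argument becomes known.  */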
2191
2192 /* Analyze a call statement CALL to determine whether and how it utilizes
2193    formal parameters of the caller FBI->node (described by FBI->info), using
2194    the intermediate information about each formal parameter kept in FBI.  */
2195
2196 static void
2197 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2198 {
2199 tree target = gimple_call_fn (call);
2200
2201 if (!target
2202 || (TREE_CODE (target) != SSA_NAME
2203 && !virtual_method_call_p (target)))
2204 return;
2205
2206 struct cgraph_edge *cs = fbi->node->get_edge (call);
2207 /* If we previously turned the call into a direct call, there is
2208 no need to analyze. */
2209 if (cs && !cs->indirect_unknown_callee)
2210 return;
2211
2212 if (cs->indirect_info->polymorphic && flag_devirtualize)
2213 {
2214 tree instance;
2215 tree target = gimple_call_fn (call);
2216 ipa_polymorphic_call_context context (current_function_decl,
2217 target, call, &instance);
2218
2219 gcc_checking_assert (cs->indirect_info->otr_type
2220 == obj_type_ref_class (target));
2221 gcc_checking_assert (cs->indirect_info->otr_token
2222 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2223
2224 cs->indirect_info->vptr_changed
2225 = !context.get_dynamic_type (instance,
2226 OBJ_TYPE_REF_OBJECT (target),
2227 obj_type_ref_class (target), call);
2228 cs->indirect_info->context = context;
2229 }
2230
2231 if (TREE_CODE (target) == SSA_NAME)
2232 ipa_analyze_indirect_call_uses (fbi, call, target);
2233 else if (virtual_method_call_p (target))
2234 ipa_analyze_virtual_call_uses (fbi, call, target);
2235 }
2236
2237
2238 /* Analyze the call statement STMT with respect to formal parameters (described
2239    in FBI->info) of the caller given by FBI->node.  Currently it only checks
2240    whether formal parameters are called.  */
2241
2242 static void
2243 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2244 {
2245 if (is_gimple_call (stmt))
2246 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2247 }
2248
2249 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2250 If OP is a parameter declaration, mark it as used in the info structure
2251 passed in DATA. */
2252
2253 static bool
2254 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2255 {
2256 struct ipa_node_params *info = (struct ipa_node_params *) data;
2257
2258 op = get_base_address (op);
2259 if (op
2260 && TREE_CODE (op) == PARM_DECL)
2261 {
2262 int index = ipa_get_param_decl_index (info, op);
2263 gcc_assert (index >= 0);
2264 ipa_set_param_used (info, index, true);
2265 }
2266
2267 return false;
2268 }
2269
2270 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2271 the findings in various structures of the associated ipa_node_params
2272 structure, such as parameter flags, notes etc. FBI holds various data about
2273 the function being analyzed. */
2274
2275 static void
2276 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2277 {
2278 gimple_stmt_iterator gsi;
2279 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2280 {
2281 gimple stmt = gsi_stmt (gsi);
2282
2283 if (is_gimple_debug (stmt))
2284 continue;
2285
2286 ipa_analyze_stmt_uses (fbi, stmt);
2287 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2288 visit_ref_for_mod_analysis,
2289 visit_ref_for_mod_analysis,
2290 visit_ref_for_mod_analysis);
2291 }
2292 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2293 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2294 visit_ref_for_mod_analysis,
2295 visit_ref_for_mod_analysis,
2296 visit_ref_for_mod_analysis);
2297 }
2298
2299 /* Calculate controlled uses of parameters of NODE. */
2300
2301 static void
2302 ipa_analyze_controlled_uses (struct cgraph_node *node)
2303 {
2304 struct ipa_node_params *info = IPA_NODE_REF (node);
2305
2306 for (int i = 0; i < ipa_get_param_count (info); i++)
2307 {
2308 tree parm = ipa_get_param (info, i);
2309 int controlled_uses = 0;
2310
2311 /* For SSA regs see if parameter is used. For non-SSA we compute
2312 the flag during modification analysis. */
2313 if (is_gimple_reg (parm))
2314 {
2315 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2316 parm);
2317 if (ddef && !has_zero_uses (ddef))
2318 {
2319 imm_use_iterator imm_iter;
2320 use_operand_p use_p;
2321
2322 ipa_set_param_used (info, i, true);
2323 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2324 if (!is_gimple_call (USE_STMT (use_p)))
2325 {
2326 if (!is_gimple_debug (USE_STMT (use_p)))
2327 {
2328 controlled_uses = IPA_UNDESCRIBED_USE;
2329 break;
2330 }
2331 }
2332 else
2333 controlled_uses++;
2334 }
2335 else
2336 controlled_uses = 0;
2337 }
2338 else
2339 controlled_uses = IPA_UNDESCRIBED_USE;
2340 ipa_set_controlled_uses (info, i, controlled_uses);
2341 }
2342 }
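
/* A worked example of a controlled use (hypothetical code, not part of GCC):
   in

     int
     wrap (int (*fn) (int))
     {
       return fn (1) + fn (2);
     }

   parameter FN has two controlled uses because each of its uses appears
   directly in a call statement.  If FN were also stored to memory or compared
   against another pointer, the count would become IPA_UNDESCRIBED_USE.  */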
2343
2344 /* Free the members of BI but not BI itself.  */
2345
2346 static void
2347 free_ipa_bb_info (struct ipa_bb_info *bi)
2348 {
2349 bi->cg_edges.release ();
2350 bi->param_aa_statuses.release ();
2351 }
2352
2353 /* Dominator walker driving the analysis. */
2354
2355 class analysis_dom_walker : public dom_walker
2356 {
2357 public:
2358 analysis_dom_walker (struct func_body_info *fbi)
2359 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2360
2361 virtual void before_dom_children (basic_block);
2362
2363 private:
2364 struct func_body_info *m_fbi;
2365 };
2366
2367 void
2368 analysis_dom_walker::before_dom_children (basic_block bb)
2369 {
2370 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2371 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2372 }
2373
2374 /* Initialize the array describing properties of formal parameters
2375 of NODE, analyze their uses and compute jump functions associated
2376 with actual arguments of calls from within NODE. */
2377
2378 void
2379 ipa_analyze_node (struct cgraph_node *node)
2380 {
2381 struct func_body_info fbi;
2382 struct ipa_node_params *info;
2383
2384 ipa_check_create_node_params ();
2385 ipa_check_create_edge_args ();
2386 info = IPA_NODE_REF (node);
2387
2388 if (info->analysis_done)
2389 return;
2390 info->analysis_done = 1;
2391
2392 if (ipa_func_spec_opts_forbid_analysis_p (node))
2393 {
2394 for (int i = 0; i < ipa_get_param_count (info); i++)
2395 {
2396 ipa_set_param_used (info, i, true);
2397 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2398 }
2399 return;
2400 }
2401
2402 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2403 push_cfun (func);
2404 calculate_dominance_info (CDI_DOMINATORS);
2405 ipa_initialize_node_params (node);
2406 ipa_analyze_controlled_uses (node);
2407
2408 fbi.node = node;
2409 fbi.info = IPA_NODE_REF (node);
2410 fbi.bb_infos = vNULL;
2411 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2412 fbi.param_count = ipa_get_param_count (info);
2413 fbi.aa_walked = 0;
2414
2415 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2416 {
2417 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2418 bi->cg_edges.safe_push (cs);
2419 }
2420
2421 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2422 {
2423 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2424 bi->cg_edges.safe_push (cs);
2425 }
2426
2427 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2428
2429 int i;
2430 struct ipa_bb_info *bi;
2431 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2432 free_ipa_bb_info (bi);
2433 fbi.bb_infos.release ();
2434 free_dominance_info (CDI_DOMINATORS);
2435 pop_cfun ();
2436 }
2437
2438 /* Update the jump functions associated with call graph edge E when the call
2439 graph edge CS is being inlined, assuming that E->caller is already (possibly
2440 indirectly) inlined into CS->callee and that E has not been inlined. */
2441
2442 static void
2443 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2444 struct cgraph_edge *e)
2445 {
2446 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2447 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2448 int count = ipa_get_cs_argument_count (args);
2449 int i;
2450
2451 for (i = 0; i < count; i++)
2452 {
2453 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2454 struct ipa_polymorphic_call_context *dst_ctx
2455 = ipa_get_ith_polymorhic_call_context (args, i);
2456
2457 if (dst->type == IPA_JF_ANCESTOR)
2458 {
2459 struct ipa_jump_func *src;
2460 int dst_fid = dst->value.ancestor.formal_id;
2461 struct ipa_polymorphic_call_context *src_ctx
2462 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2463
2464 /* Variable number of arguments can cause havoc if we try to access
2465 one that does not exist in the inlined edge. So make sure we
2466 don't. */
2467 if (dst_fid >= ipa_get_cs_argument_count (top))
2468 {
2469 ipa_set_jf_unknown (dst);
2470 continue;
2471 }
2472
2473 src = ipa_get_ith_jump_func (top, dst_fid);
2474
2475 if (src_ctx && !src_ctx->useless_p ())
2476 {
2477 struct ipa_polymorphic_call_context ctx = *src_ctx;
2478
2479 /* TODO: Make type preserved safe WRT contexts. */
2480 if (!ipa_get_jf_ancestor_type_preserved (dst))
2481 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2482 ctx.offset_by (dst->value.ancestor.offset);
2483 if (!ctx.useless_p ())
2484 {
2485 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2486 count);
2487 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2488 }
2489 dst_ctx->combine_with (ctx);
2490 }
2491
2492 if (src->agg.items
2493 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2494 {
2495 struct ipa_agg_jf_item *item;
2496 int j;
2497
2498 /* Currently we do not produce clobber aggregate jump functions,
2499 replace with merging when we do. */
2500 gcc_assert (!dst->agg.items);
2501
2502 dst->agg.items = vec_safe_copy (src->agg.items);
2503 dst->agg.by_ref = src->agg.by_ref;
2504 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2505 item->offset -= dst->value.ancestor.offset;
2506 }
2507
2508 if (src->type == IPA_JF_PASS_THROUGH
2509 && src->value.pass_through.operation == NOP_EXPR)
2510 {
2511 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2512 dst->value.ancestor.agg_preserved &=
2513 src->value.pass_through.agg_preserved;
2514 }
2515 else if (src->type == IPA_JF_ANCESTOR)
2516 {
2517 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2518 dst->value.ancestor.offset += src->value.ancestor.offset;
2519 dst->value.ancestor.agg_preserved &=
2520 src->value.ancestor.agg_preserved;
2521 }
2522 else
2523 ipa_set_jf_unknown (dst);
2524 }
2525 else if (dst->type == IPA_JF_PASS_THROUGH)
2526 {
2527 struct ipa_jump_func *src;
2528 	  /* We must check the range due to calls with a variable number of
2529 	     arguments and we cannot combine jump functions with operations.  */
2530 if (dst->value.pass_through.operation == NOP_EXPR
2531 && (dst->value.pass_through.formal_id
2532 < ipa_get_cs_argument_count (top)))
2533 {
2534 int dst_fid = dst->value.pass_through.formal_id;
2535 src = ipa_get_ith_jump_func (top, dst_fid);
2536 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2537 struct ipa_polymorphic_call_context *src_ctx
2538 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2539
2540 if (src_ctx && !src_ctx->useless_p ())
2541 {
2542 struct ipa_polymorphic_call_context ctx = *src_ctx;
2543
2544 /* TODO: Make type preserved safe WRT contexts. */
2545 if (!ipa_get_jf_pass_through_type_preserved (dst))
2546 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2547 if (!ctx.useless_p ())
2548 {
2549 if (!dst_ctx)
2550 {
2551 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2552 count);
2553 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2554 }
2555 dst_ctx->combine_with (ctx);
2556 }
2557 }
2558 switch (src->type)
2559 {
2560 case IPA_JF_UNKNOWN:
2561 ipa_set_jf_unknown (dst);
2562 break;
2563 case IPA_JF_CONST:
2564 ipa_set_jf_cst_copy (dst, src);
2565 break;
2566
2567 case IPA_JF_PASS_THROUGH:
2568 {
2569 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2570 enum tree_code operation;
2571 operation = ipa_get_jf_pass_through_operation (src);
2572
2573 if (operation == NOP_EXPR)
2574 {
2575 bool agg_p;
2576 agg_p = dst_agg_p
2577 && ipa_get_jf_pass_through_agg_preserved (src);
2578 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2579 }
2580 else
2581 {
2582 tree operand = ipa_get_jf_pass_through_operand (src);
2583 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2584 operation);
2585 }
2586 break;
2587 }
2588 case IPA_JF_ANCESTOR:
2589 {
2590 bool agg_p;
2591 agg_p = dst_agg_p
2592 && ipa_get_jf_ancestor_agg_preserved (src);
2593 ipa_set_ancestor_jf (dst,
2594 ipa_get_jf_ancestor_offset (src),
2595 ipa_get_jf_ancestor_formal_id (src),
2596 agg_p);
2597 break;
2598 }
2599 default:
2600 gcc_unreachable ();
2601 }
2602
2603 if (src->agg.items
2604 && (dst_agg_p || !src->agg.by_ref))
2605 {
2606 /* Currently we do not produce clobber aggregate jump
2607 functions, replace with merging when we do. */
2608 gcc_assert (!dst->agg.items);
2609
2610 dst->agg.by_ref = src->agg.by_ref;
2611 dst->agg.items = vec_safe_copy (src->agg.items);
2612 }
2613 }
2614 else
2615 ipa_set_jf_unknown (dst);
2616 }
2617 }
2618 }
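
/* A small example of the composition performed above, using hypothetical
   functions that are not part of GCC.  Suppose B has been inlined into A and
   B contained a call to C:

     void C (int x);
     static void B (int b) { C (b); }
     void A (int a) { B (a); }

   Before inlining, the edge B->C carries a pass-through jump function for
   parameter B and the edge A->B (CS here) carries a pass-through for
   parameter A.  After inlining, the surviving edge to C must be described
   relative to A's parameters, so the two pass-throughs are combined into a
   single pass-through of A's parameter 0; combinations that cannot be
   expressed this way are demoted to IPA_JF_UNKNOWN.  */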
2619
2620 /* If TARGET is an addr_expr of a function declaration, make it the
2621    (SPECULATIVE) destination of an indirect edge IE and return the edge.
2622 Otherwise, return NULL. */
2623
2624 struct cgraph_edge *
2625 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2626 bool speculative)
2627 {
2628 struct cgraph_node *callee;
2629 struct inline_edge_summary *es = inline_edge_summary (ie);
2630 bool unreachable = false;
2631
2632 if (TREE_CODE (target) == ADDR_EXPR)
2633 target = TREE_OPERAND (target, 0);
2634 if (TREE_CODE (target) != FUNCTION_DECL)
2635 {
2636 target = canonicalize_constructor_val (target, NULL);
2637 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2638 {
2639 if (ie->indirect_info->member_ptr)
2640 /* Member pointer call that goes through a VMT lookup. */
2641 return NULL;
2642
2643 if (dump_enabled_p ())
2644 {
2645 location_t loc = gimple_location_safe (ie->call_stmt);
2646 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2647 "discovered direct call to non-function in %s/%i, "
2648 "making it __builtin_unreachable\n",
2649 ie->caller->name (), ie->caller->order);
2650 }
2651
2652 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2653 callee = cgraph_node::get_create (target);
2654 unreachable = true;
2655 }
2656 else
2657 callee = cgraph_node::get (target);
2658 }
2659 else
2660 callee = cgraph_node::get (target);
2661
2662   /* Because may-edges are not explicitly represented and the vtable may be external,
2663 we may create the first reference to the object in the unit. */
2664 if (!callee || callee->global.inlined_to)
2665 {
2666
2667       /* We had better make sure we can refer to it.
2668 	 In the case of static functions we are out of luck, since we have
2669 	 already removed their bodies.  In the case of public functions we may
2670 	 or may not introduce the reference.  */
2671 if (!canonicalize_constructor_val (target, NULL)
2672 || !TREE_PUBLIC (target))
2673 {
2674 if (dump_file)
2675 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2676 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2677 xstrdup_for_dump (ie->caller->name ()),
2678 ie->caller->order,
2679 xstrdup_for_dump (ie->callee->name ()),
2680 ie->callee->order);
2681 return NULL;
2682 }
2683 callee = cgraph_node::get_create (target);
2684 }
2685
2686   /* If the edge is already speculative, check whether the new target agrees
       with the existing speculation.  */
2687 if (speculative && ie->speculative)
2688 {
2689 struct cgraph_edge *e2;
2690 struct ipa_ref *ref;
2691 ie->speculative_call_info (e2, ie, ref);
2692 if (e2->callee->ultimate_alias_target ()
2693 != callee->ultimate_alias_target ())
2694 {
2695 if (dump_file)
2696 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2697 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2698 xstrdup_for_dump (ie->caller->name ()),
2699 ie->caller->order,
2700 xstrdup_for_dump (callee->name ()),
2701 callee->order,
2702 xstrdup_for_dump (e2->callee->name ()),
2703 e2->callee->order);
2704 }
2705 else
2706 {
2707 if (dump_file)
2708 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2709 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2710 xstrdup_for_dump (ie->caller->name ()),
2711 ie->caller->order,
2712 xstrdup_for_dump (callee->name ()),
2713 callee->order);
2714 }
2715 return NULL;
2716 }
2717
2718 if (!dbg_cnt (devirt))
2719 return NULL;
2720
2721 ipa_check_create_node_params ();
2722
2723   /* We cannot make edges to inline clones.  It is a bug that someone removed
2724      the cgraph node too early.  */
2725 gcc_assert (!callee->global.inlined_to);
2726
2727 if (dump_file && !unreachable)
2728 {
2729 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2730 "(%s/%i -> %s/%i), for stmt ",
2731 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2732 speculative ? "speculative" : "known",
2733 xstrdup_for_dump (ie->caller->name ()),
2734 ie->caller->order,
2735 xstrdup_for_dump (callee->name ()),
2736 callee->order);
2737 if (ie->call_stmt)
2738 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2739 else
2740 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2741 }
2742 if (dump_enabled_p ())
2743 {
2744 location_t loc = gimple_location_safe (ie->call_stmt);
2745
2746 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2747 "converting indirect call in %s to direct call to %s\n",
2748 ie->caller->name (), callee->name ());
2749 }
2750 if (!speculative)
2751 {
2752 struct cgraph_edge *orig = ie;
2753 ie = ie->make_direct (callee);
2754       /* If we resolved a speculative edge, the cost is already up to date
2755 	 for the direct call (adjusted by inline_edge_duplication_hook).  */
2756 if (ie == orig)
2757 {
2758 es = inline_edge_summary (ie);
2759 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2760 - eni_size_weights.call_cost);
2761 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2762 - eni_time_weights.call_cost);
2763 }
2764 }
2765 else
2766 {
2767 if (!callee->can_be_discarded_p ())
2768 {
2769 cgraph_node *alias;
2770 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2771 if (alias)
2772 callee = alias;
2773 }
2774 /* make_speculative will update ie's cost to direct call cost. */
2775 ie = ie->make_speculative
2776 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2777 }
2778
2779 return ie;
2780 }
2781
2782 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2783    return NULL if there is none.  BY_REF specifies whether the value has to
2784 be passed by reference or by value. */
2785
2786 tree
2787 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2788 HOST_WIDE_INT offset, bool by_ref)
2789 {
2790 struct ipa_agg_jf_item *item;
2791 int i;
2792
2793 if (by_ref != agg->by_ref)
2794 return NULL;
2795
2796 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2797 if (item->offset == offset)
2798 {
2799 /* Currently we do not have clobber values, return NULL for them once
2800 we do. */
2801 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2802 return item->value;
2803 }
2804 return NULL;
2805 }
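
/* Usage sketch: if an aggregate jump function describes a by-reference
   argument whose items are, hypothetically, {offset 0 -> 16, offset 64 ->
   &foo}, then

     ipa_find_agg_cst_for_param (&jfunc->agg, 64, true)

   returns the ADDR_EXPR of foo, whereas asking for offset 32, or passing a
   BY_REF that does not match AGG->by_ref, returns NULL.  */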
2806
2807 /* Remove a reference to SYMBOL from the list of references of a node given by
2808 reference description RDESC. Return true if the reference has been
2809 successfully found and removed. */
2810
2811 static bool
2812 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2813 {
2814 struct ipa_ref *to_del;
2815 struct cgraph_edge *origin;
2816
2817 origin = rdesc->cs;
2818 if (!origin)
2819 return false;
2820 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2821 origin->lto_stmt_uid);
2822 if (!to_del)
2823 return false;
2824
2825 to_del->remove_reference ();
2826 if (dump_file)
2827 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2828 xstrdup_for_dump (origin->caller->name ()),
2829 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2830 return true;
2831 }
2832
2833 /* If JFUNC has a reference description with refcount different from
2834 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2835 NULL. JFUNC must be a constant jump function. */
2836
2837 static struct ipa_cst_ref_desc *
2838 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2839 {
2840 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2841 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2842 return rdesc;
2843 else
2844 return NULL;
2845 }
2846
2847 /* If the value of constant jump function JFUNC is an address of a function
2848 declaration, return the associated call graph node. Otherwise return
2849 NULL. */
2850
2851 static cgraph_node *
2852 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2853 {
2854 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2855 tree cst = ipa_get_jf_constant (jfunc);
2856 if (TREE_CODE (cst) != ADDR_EXPR
2857 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2858 return NULL;
2859
2860 return cgraph_node::get (TREE_OPERAND (cst, 0));
2861 }
2862
2863
2864 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2865 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2866 the edge specified in the rdesc. Return false if either the symbol or the
2867 reference could not be found, otherwise return true. */
2868
2869 static bool
2870 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2871 {
2872 struct ipa_cst_ref_desc *rdesc;
2873 if (jfunc->type == IPA_JF_CONST
2874 && (rdesc = jfunc_rdesc_usable (jfunc))
2875 && --rdesc->refcount == 0)
2876 {
2877 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2878 if (!symbol)
2879 return false;
2880
2881 return remove_described_reference (symbol, rdesc);
2882 }
2883 return true;
2884 }
2885
2886 /* Try to find a destination for indirect edge IE that corresponds to a simple
2887 call or a call of a member function pointer and where the destination is a
2888 pointer formal parameter described by jump function JFUNC. If it can be
2889 determined, return the newly direct edge, otherwise return NULL.
2890 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2891
2892 static struct cgraph_edge *
2893 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2894 struct ipa_jump_func *jfunc,
2895 struct ipa_node_params *new_root_info)
2896 {
2897 struct cgraph_edge *cs;
2898 tree target;
2899 bool agg_contents = ie->indirect_info->agg_contents;
2900
2901 if (ie->indirect_info->agg_contents)
2902 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2903 ie->indirect_info->offset,
2904 ie->indirect_info->by_ref);
2905 else
2906 target = ipa_value_from_jfunc (new_root_info, jfunc);
2907 if (!target)
2908 return NULL;
2909 cs = ipa_make_edge_direct_to_target (ie, target);
2910
2911 if (cs && !agg_contents)
2912 {
2913 bool ok;
2914 gcc_checking_assert (cs->callee
2915 && (cs != ie
2916 || jfunc->type != IPA_JF_CONST
2917 || !cgraph_node_for_jfunc (jfunc)
2918 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2919 ok = try_decrement_rdesc_refcount (jfunc);
2920 gcc_checking_assert (ok);
2921 }
2922
2923 return cs;
2924 }
2925
2926 /* Return the target to be used in cases of impossible devirtualization. IE
2927 and target (the latter can be NULL) are dumped when dumping is enabled. */
2928
2929 tree
2930 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2931 {
2932 if (dump_file)
2933 {
2934 if (target)
2935 fprintf (dump_file,
2936 "Type inconsistent devirtualization: %s/%i->%s\n",
2937 ie->caller->name (), ie->caller->order,
2938 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2939 else
2940 fprintf (dump_file,
2941 "No devirtualization target in %s/%i\n",
2942 ie->caller->name (), ie->caller->order);
2943 }
2944 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2945 cgraph_node::get_create (new_target);
2946 return new_target;
2947 }
2948
2949 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2950 call based on a formal parameter which is described by jump function JFUNC
2951 and if it can be determined, make it direct and return the direct edge.
2952 Otherwise, return NULL. CTX describes the polymorphic context that the
2953 parameter the call is based on brings along with it. */
2954
2955 static struct cgraph_edge *
2956 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2957 struct ipa_jump_func *jfunc,
2958 struct ipa_polymorphic_call_context ctx)
2959 {
2960 tree target = NULL;
2961 bool speculative = false;
2962
2963 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2964 return NULL;
2965
2966 gcc_assert (!ie->indirect_info->by_ref);
2967
2968 /* Try to do lookup via known virtual table pointer value. */
2969 if (!ie->indirect_info->vptr_changed
2970 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2971 {
2972 tree vtable;
2973 unsigned HOST_WIDE_INT offset;
2974 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2975 ie->indirect_info->offset,
2976 true);
2977 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2978 {
2979 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2980 vtable, offset);
2981 if (t)
2982 {
2983 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2984 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2985 || !possible_polymorphic_call_target_p
2986 (ie, cgraph_node::get (t)))
2987 {
2988 /* Do not speculate builtin_unreachable, it is stupid! */
2989 if (!ie->indirect_info->vptr_changed)
2990 target = ipa_impossible_devirt_target (ie, target);
2991 }
2992 else
2993 {
2994 target = t;
2995 speculative = ie->indirect_info->vptr_changed;
2996 }
2997 }
2998 }
2999 }
3000
3001 ipa_polymorphic_call_context ie_context (ie);
3002 vec <cgraph_node *>targets;
3003 bool final;
3004
3005 ctx.offset_by (ie->indirect_info->offset);
3006 if (ie->indirect_info->vptr_changed)
3007 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3008 ie->indirect_info->otr_type);
3009 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3010 targets = possible_polymorphic_call_targets
3011 (ie->indirect_info->otr_type,
3012 ie->indirect_info->otr_token,
3013 ctx, &final);
3014 if (final && targets.length () <= 1)
3015 {
3016 speculative = false;
3017 if (targets.length () == 1)
3018 target = targets[0]->decl;
3019 else
3020 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3021 }
3022 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3023 && !ie->speculative && ie->maybe_hot_p ())
3024 {
3025 cgraph_node *n;
3026 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3027 ie->indirect_info->otr_token,
3028 ie->indirect_info->context);
3029 if (n)
3030 {
3031 target = n->decl;
3032 speculative = true;
3033 }
3034 }
3035
3036 if (target)
3037 {
3038 if (!possible_polymorphic_call_target_p
3039 (ie, cgraph_node::get_create (target)))
3040 {
3041 if (speculative)
3042 return NULL;
3043 target = ipa_impossible_devirt_target (ie, target);
3044 }
3045 return ipa_make_edge_direct_to_target (ie, target, speculative);
3046 }
3047 else
3048 return NULL;
3049 }
3050
3051 /* Update the parameter call notes associated with NODE when CS is being inlined,
3052 assuming NODE is (potentially indirectly) inlined into CS->callee.
3053 Moreover, if the callee is discovered to be constant, create a new cgraph
3054 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3055 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3056
3057 static bool
3058 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3059 struct cgraph_node *node,
3060 vec<cgraph_edge *> *new_edges)
3061 {
3062 struct ipa_edge_args *top;
3063 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3064 struct ipa_node_params *new_root_info;
3065 bool res = false;
3066
3067 ipa_check_create_edge_args ();
3068 top = IPA_EDGE_REF (cs);
3069 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3070 ? cs->caller->global.inlined_to
3071 : cs->caller);
3072
3073 for (ie = node->indirect_calls; ie; ie = next_ie)
3074 {
3075 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3076 struct ipa_jump_func *jfunc;
3077 int param_index;
3078 cgraph_node *spec_target = NULL;
3079
3080 next_ie = ie->next_callee;
3081
3082 if (ici->param_index == -1)
3083 continue;
3084
3085       /* We must check the range due to calls with a variable number of arguments.  */
3086 if (ici->param_index >= ipa_get_cs_argument_count (top))
3087 {
3088 ici->param_index = -1;
3089 continue;
3090 }
3091
3092 param_index = ici->param_index;
3093 jfunc = ipa_get_ith_jump_func (top, param_index);
3094
3095 if (ie->speculative)
3096 {
3097 struct cgraph_edge *de;
3098 struct ipa_ref *ref;
3099 ie->speculative_call_info (de, ie, ref);
3100 spec_target = de->callee;
3101 }
3102
3103 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3104 new_direct_edge = NULL;
3105 else if (ici->polymorphic)
3106 {
3107 ipa_polymorphic_call_context ctx;
3108 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3109 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3110 }
3111 else
3112 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3113 new_root_info);
3114 /* If speculation was removed, then we need to do nothing. */
3115 if (new_direct_edge && new_direct_edge != ie
3116 && new_direct_edge->callee == spec_target)
3117 {
3118 new_direct_edge->indirect_inlining_edge = 1;
3119 top = IPA_EDGE_REF (cs);
3120 res = true;
3121 if (!new_direct_edge->speculative)
3122 continue;
3123 }
3124 else if (new_direct_edge)
3125 {
3126 new_direct_edge->indirect_inlining_edge = 1;
3127 if (new_direct_edge->call_stmt)
3128 new_direct_edge->call_stmt_cannot_inline_p
3129 = !gimple_check_call_matching_types (
3130 new_direct_edge->call_stmt,
3131 new_direct_edge->callee->decl, false);
3132 if (new_edges)
3133 {
3134 new_edges->safe_push (new_direct_edge);
3135 res = true;
3136 }
3137 top = IPA_EDGE_REF (cs);
3138 	  /* If a speculative edge was introduced, we still need to update the
3139 	     call info of the indirect edge.  */
3140 if (!new_direct_edge->speculative)
3141 continue;
3142 }
3143 if (jfunc->type == IPA_JF_PASS_THROUGH
3144 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3145 {
3146 if ((ici->agg_contents
3147 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3148 || (ici->polymorphic
3149 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3150 ici->param_index = -1;
3151 else
3152 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3153 }
3154 else if (jfunc->type == IPA_JF_ANCESTOR)
3155 {
3156 if ((ici->agg_contents
3157 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3158 || (ici->polymorphic
3159 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3160 ici->param_index = -1;
3161 else
3162 {
3163 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3164 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3165 }
3166 }
3167 else
3168 /* Either we can find a destination for this edge now or never. */
3169 ici->param_index = -1;
3170 }
3171
3172 return res;
3173 }
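
/* The effect of the loop above on a hypothetical example (not from GCC):

     static void run (void) { }
     static void apply (void (*f) (void)) { f (); }
     void entry (void) { apply (run); }

   When apply is inlined into entry, the indirect edge for "f ()" has
   param_index 0, and the jump function of the inlined call site says that
   argument 0 is the constant &run.  try_make_edge_direct_simple_call
   therefore turns the indirect edge into a direct edge to run, which is
   pushed onto *NEW_EDGES so that the inliner can consider it too.  */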
3174
3175 /* Recursively traverse subtree of NODE (including node) made of inlined
3176 cgraph_edges when CS has been inlined and invoke
3177 update_indirect_edges_after_inlining on all nodes and
3178 update_jump_functions_after_inlining on all non-inlined edges that lead out
3179 of this subtree. Newly discovered indirect edges will be added to
3180 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3181 created. */
3182
3183 static bool
3184 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3185 struct cgraph_node *node,
3186 vec<cgraph_edge *> *new_edges)
3187 {
3188 struct cgraph_edge *e;
3189 bool res;
3190
3191 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3192
3193 for (e = node->callees; e; e = e->next_callee)
3194 if (!e->inline_failed)
3195 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3196 else
3197 update_jump_functions_after_inlining (cs, e);
3198 for (e = node->indirect_calls; e; e = e->next_callee)
3199 update_jump_functions_after_inlining (cs, e);
3200
3201 return res;
3202 }
3203
3204 /* Combine two controlled uses counts as done during inlining. */
3205
3206 static int
3207 combine_controlled_uses_counters (int c, int d)
3208 {
3209 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3210 return IPA_UNDESCRIBED_USE;
3211 else
3212 return c + d - 1;
3213 }
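
/* For example, if the new root had C == 2 controlled uses of a parameter and
   the inlined callee had D == 3 controlled uses of the value it received, the
   combined count is 2 + 3 - 1 == 4: the call site that has just been inlined
   was itself one of the controlled uses and no longer exists, while the
   callee's uses are now attributed to the root.  If either count is
   IPA_UNDESCRIBED_USE, nothing useful is known and the combination stays
   IPA_UNDESCRIBED_USE.  */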
3214
3215 /* Propagate the number of controlled users from CS->callee to the new root of
3216    the tree of inlined nodes.  */
3217
3218 static void
3219 propagate_controlled_uses (struct cgraph_edge *cs)
3220 {
3221 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3222 struct cgraph_node *new_root = cs->caller->global.inlined_to
3223 ? cs->caller->global.inlined_to : cs->caller;
3224 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3225 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3226 int count, i;
3227
3228 count = MIN (ipa_get_cs_argument_count (args),
3229 ipa_get_param_count (old_root_info));
3230 for (i = 0; i < count; i++)
3231 {
3232 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3233 struct ipa_cst_ref_desc *rdesc;
3234
3235 if (jf->type == IPA_JF_PASS_THROUGH)
3236 {
3237 int src_idx, c, d;
3238 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3239 c = ipa_get_controlled_uses (new_root_info, src_idx);
3240 d = ipa_get_controlled_uses (old_root_info, i);
3241
3242 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3243 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3244 c = combine_controlled_uses_counters (c, d);
3245 ipa_set_controlled_uses (new_root_info, src_idx, c);
3246 if (c == 0 && new_root_info->ipcp_orig_node)
3247 {
3248 struct cgraph_node *n;
3249 struct ipa_ref *ref;
3250 tree t = new_root_info->known_csts[src_idx];
3251
3252 if (t && TREE_CODE (t) == ADDR_EXPR
3253 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3254 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3255 && (ref = new_root->find_reference (n, NULL, 0)))
3256 {
3257 if (dump_file)
3258 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3259 "reference from %s/%i to %s/%i.\n",
3260 xstrdup_for_dump (new_root->name ()),
3261 new_root->order,
3262 xstrdup_for_dump (n->name ()), n->order);
3263 ref->remove_reference ();
3264 }
3265 }
3266 }
3267 else if (jf->type == IPA_JF_CONST
3268 && (rdesc = jfunc_rdesc_usable (jf)))
3269 {
3270 int d = ipa_get_controlled_uses (old_root_info, i);
3271 int c = rdesc->refcount;
3272 rdesc->refcount = combine_controlled_uses_counters (c, d);
3273 if (rdesc->refcount == 0)
3274 {
3275 tree cst = ipa_get_jf_constant (jf);
3276 struct cgraph_node *n;
3277 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3278 && TREE_CODE (TREE_OPERAND (cst, 0))
3279 == FUNCTION_DECL);
3280 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3281 if (n)
3282 {
3283 struct cgraph_node *clone;
3284 bool ok;
3285 ok = remove_described_reference (n, rdesc);
3286 gcc_checking_assert (ok);
3287
3288 clone = cs->caller;
3289 while (clone->global.inlined_to
3290 && clone != rdesc->cs->caller
3291 && IPA_NODE_REF (clone)->ipcp_orig_node)
3292 {
3293 struct ipa_ref *ref;
3294 ref = clone->find_reference (n, NULL, 0);
3295 if (ref)
3296 {
3297 if (dump_file)
3298 fprintf (dump_file, "ipa-prop: Removing "
3299 "cloning-created reference "
3300 "from %s/%i to %s/%i.\n",
3301 xstrdup_for_dump (clone->name ()),
3302 clone->order,
3303 xstrdup_for_dump (n->name ()),
3304 n->order);
3305 ref->remove_reference ();
3306 }
3307 clone = clone->callers->caller;
3308 }
3309 }
3310 }
3311 }
3312 }
3313
3314 for (i = ipa_get_param_count (old_root_info);
3315 i < ipa_get_cs_argument_count (args);
3316 i++)
3317 {
3318 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3319
3320 if (jf->type == IPA_JF_CONST)
3321 {
3322 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3323 if (rdesc)
3324 rdesc->refcount = IPA_UNDESCRIBED_USE;
3325 }
3326 else if (jf->type == IPA_JF_PASS_THROUGH)
3327 ipa_set_controlled_uses (new_root_info,
3328 jf->value.pass_through.formal_id,
3329 IPA_UNDESCRIBED_USE);
3330 }
3331 }
3332
3333 /* Update jump functions and call note functions on inlining the call site CS.
3334 CS is expected to lead to a node already cloned by
3335 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3336    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3337    created.  */
3338
3339 bool
3340 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3341 vec<cgraph_edge *> *new_edges)
3342 {
3343 bool changed;
3344 /* Do nothing if the preparation phase has not been carried out yet
3345 (i.e. during early inlining). */
3346 if (!ipa_node_params_sum)
3347 return false;
3348 gcc_assert (ipa_edge_args_vector);
3349
3350 propagate_controlled_uses (cs);
3351 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3352
3353 return changed;
3354 }
3355
3356 /* Frees all dynamically allocated structures that the argument info points
3357 to. */
3358
3359 void
3360 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3361 {
3362 vec_free (args->jump_functions);
3363 memset (args, 0, sizeof (*args));
3364 }
3365
3366 /* Free all ipa_edge_args structures.  */
3367
3368 void
3369 ipa_free_all_edge_args (void)
3370 {
3371 int i;
3372 struct ipa_edge_args *args;
3373
3374 if (!ipa_edge_args_vector)
3375 return;
3376
3377 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3378 ipa_free_edge_args_substructures (args);
3379
3380 vec_free (ipa_edge_args_vector);
3381 }
3382
3383 /* Frees all dynamically allocated structures that the param info points
3384 to. */
3385
3386 ipa_node_params::~ipa_node_params ()
3387 {
3388 descriptors.release ();
3389 free (lattices);
3390   /* Lattice values and their sources are deallocated with their allocation
3391 pool. */
3392 known_contexts.release ();
3393
3394 lattices = NULL;
3395 ipcp_orig_node = NULL;
3396 analysis_done = 0;
3397 node_enqueued = 0;
3398 do_clone_for_all_contexts = 0;
3399 is_all_contexts_clone = 0;
3400 node_dead = 0;
3401 }
3402
3403 /* Free all ipa_node_params structures. */
3404
3405 void
3406 ipa_free_all_node_params (void)
3407 {
3408 delete ipa_node_params_sum;
3409 ipa_node_params_sum = NULL;
3410 }
3411
3412 /* Grow ipcp_transformations if necessary. */
3413
3414 void
3415 ipcp_grow_transformations_if_necessary (void)
3416 {
3417 if (vec_safe_length (ipcp_transformations)
3418 <= (unsigned) symtab->cgraph_max_uid)
3419 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3420 }
3421
3422 /* Set the aggregate replacements of NODE to be AGGVALS. */
3423
3424 void
3425 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3426 struct ipa_agg_replacement_value *aggvals)
3427 {
3428 ipcp_grow_transformations_if_necessary ();
3429 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3430 }
3431
3432 /* Hook that is called by cgraph.c when an edge is removed. */
3433
3434 static void
3435 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3436 {
3437 struct ipa_edge_args *args;
3438
3439 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3440 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3441 return;
3442
3443 args = IPA_EDGE_REF (cs);
3444 if (args->jump_functions)
3445 {
3446 struct ipa_jump_func *jf;
3447 int i;
3448 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3449 {
3450 struct ipa_cst_ref_desc *rdesc;
3451 try_decrement_rdesc_refcount (jf);
3452 if (jf->type == IPA_JF_CONST
3453 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3454 && rdesc->cs == cs)
3455 rdesc->cs = NULL;
3456 }
3457 }
3458
3459 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3460 }
3461
3462 /* Hook that is called by cgraph.c when an edge is duplicated. */
3463
3464 static void
3465 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3466 void *)
3467 {
3468 struct ipa_edge_args *old_args, *new_args;
3469 unsigned int i;
3470
3471 ipa_check_create_edge_args ();
3472
3473 old_args = IPA_EDGE_REF (src);
3474 new_args = IPA_EDGE_REF (dst);
3475
3476 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3477 if (old_args->polymorphic_call_contexts)
3478 new_args->polymorphic_call_contexts
3479 = vec_safe_copy (old_args->polymorphic_call_contexts);
3480
3481 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3482 {
3483 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3484 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3485
3486 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3487
3488 if (src_jf->type == IPA_JF_CONST)
3489 {
3490 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3491
3492 if (!src_rdesc)
3493 dst_jf->value.constant.rdesc = NULL;
3494 else if (src->caller == dst->caller)
3495 {
3496 struct ipa_ref *ref;
3497 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3498 gcc_checking_assert (n);
3499 ref = src->caller->find_reference (n, src->call_stmt,
3500 src->lto_stmt_uid);
3501 gcc_checking_assert (ref);
3502 dst->caller->clone_reference (ref, ref->stmt);
3503
3504 gcc_checking_assert (ipa_refdesc_pool);
3505 struct ipa_cst_ref_desc *dst_rdesc
3506 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3507 dst_rdesc->cs = dst;
3508 dst_rdesc->refcount = src_rdesc->refcount;
3509 dst_rdesc->next_duplicate = NULL;
3510 dst_jf->value.constant.rdesc = dst_rdesc;
3511 }
3512 else if (src_rdesc->cs == src)
3513 {
3514 struct ipa_cst_ref_desc *dst_rdesc;
3515 gcc_checking_assert (ipa_refdesc_pool);
3516 dst_rdesc
3517 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3518 dst_rdesc->cs = dst;
3519 dst_rdesc->refcount = src_rdesc->refcount;
3520 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3521 src_rdesc->next_duplicate = dst_rdesc;
3522 dst_jf->value.constant.rdesc = dst_rdesc;
3523 }
3524 else
3525 {
3526 struct ipa_cst_ref_desc *dst_rdesc;
3527 /* This can happen during inlining, when a JFUNC can refer to a
3528 reference taken in a function up in the tree of inline clones.
3529 We need to find the duplicate that refers to our tree of
3530 inline clones. */
3531
3532 gcc_assert (dst->caller->global.inlined_to);
3533 for (dst_rdesc = src_rdesc->next_duplicate;
3534 dst_rdesc;
3535 dst_rdesc = dst_rdesc->next_duplicate)
3536 {
3537 struct cgraph_node *top;
3538 top = dst_rdesc->cs->caller->global.inlined_to
3539 ? dst_rdesc->cs->caller->global.inlined_to
3540 : dst_rdesc->cs->caller;
3541 if (dst->caller->global.inlined_to == top)
3542 break;
3543 }
3544 gcc_assert (dst_rdesc);
3545 dst_jf->value.constant.rdesc = dst_rdesc;
3546 }
3547 }
3548 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3549 && src->caller == dst->caller)
3550 {
3551 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3552 ? dst->caller->global.inlined_to : dst->caller;
3553 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3554 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3555
3556 int c = ipa_get_controlled_uses (root_info, idx);
3557 if (c != IPA_UNDESCRIBED_USE)
3558 {
3559 c++;
3560 ipa_set_controlled_uses (root_info, idx, c);
3561 }
3562 }
3563 }
3564 }
3565
3566 /* Analyze a function newly added to the callgraph.  */
3567
3568 static void
3569 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3570 {
3571 if (node->has_gimple_body_p ())
3572 ipa_analyze_node (node);
3573 }
3574
3575 /* Hook that is called by summary when a node is duplicated. */
3576
3577 void
3578 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3579 ipa_node_params *old_info,
3580 ipa_node_params *new_info)
3581 {
3582 ipa_agg_replacement_value *old_av, *new_av;
3583
3584 new_info->descriptors = old_info->descriptors.copy ();
3585 new_info->lattices = NULL;
3586 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3587
3588 new_info->analysis_done = old_info->analysis_done;
3589 new_info->node_enqueued = old_info->node_enqueued;
3590
3591 old_av = ipa_get_agg_replacements_for_node (src);
3592 if (old_av)
3593 {
3594 new_av = NULL;
3595 while (old_av)
3596 {
3597 struct ipa_agg_replacement_value *v;
3598
3599 v = ggc_alloc<ipa_agg_replacement_value> ();
3600 memcpy (v, old_av, sizeof (*v));
3601 v->next = new_av;
3602 new_av = v;
3603 old_av = old_av->next;
3604 }
3605 ipa_set_node_agg_value_chain (dst, new_av);
3606 }
3607
3608 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3609
3610 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3611 {
3612 ipcp_grow_transformations_if_necessary ();
3613 src_trans = ipcp_get_transformation_summary (src);
3614 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3615 vec<ipa_alignment, va_gc> *&dst_alignments
3616 = ipcp_get_transformation_summary (dst)->alignments;
3617 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3618 for (unsigned i = 0; i < src_alignments->length (); ++i)
3619 dst_alignments->quick_push ((*src_alignments)[i]);
3620 }
3621 }
3622
3623 /* Register our cgraph hooks if they are not already there. */
3624
3625 void
3626 ipa_register_cgraph_hooks (void)
3627 {
3628 ipa_check_create_node_params ();
3629
3630 if (!edge_removal_hook_holder)
3631 edge_removal_hook_holder =
3632 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3633 if (!edge_duplication_hook_holder)
3634 edge_duplication_hook_holder =
3635 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3636 function_insertion_hook_holder =
3637 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3638 }
3639
3640 /* Unregister our cgraph hooks. */
3641
3642 static void
3643 ipa_unregister_cgraph_hooks (void)
3644 {
3645 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3646 edge_removal_hook_holder = NULL;
3647 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3648 edge_duplication_hook_holder = NULL;
3649 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3650 function_insertion_hook_holder = NULL;
3651 }
3652
3653 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3654 longer needed after ipa-cp. */
3655
3656 void
3657 ipa_free_all_structures_after_ipa_cp (void)
3658 {
3659 if (!optimize && !in_lto_p)
3660 {
3661 ipa_free_all_edge_args ();
3662 ipa_free_all_node_params ();
3663 free_alloc_pool (ipcp_sources_pool);
3664 free_alloc_pool (ipcp_cst_values_pool);
3665 free_alloc_pool (ipcp_poly_ctx_values_pool);
3666 free_alloc_pool (ipcp_agg_lattice_pool);
3667 ipa_unregister_cgraph_hooks ();
3668 if (ipa_refdesc_pool)
3669 free_alloc_pool (ipa_refdesc_pool);
3670 }
3671 }
3672
3673 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3674 longer needed after indirect inlining. */
3675
3676 void
3677 ipa_free_all_structures_after_iinln (void)
3678 {
3679 ipa_free_all_edge_args ();
3680 ipa_free_all_node_params ();
3681 ipa_unregister_cgraph_hooks ();
3682 if (ipcp_sources_pool)
3683 free_alloc_pool (ipcp_sources_pool);
3684 if (ipcp_cst_values_pool)
3685 free_alloc_pool (ipcp_cst_values_pool);
3686 if (ipcp_poly_ctx_values_pool)
3687 free_alloc_pool (ipcp_poly_ctx_values_pool);
3688 if (ipcp_agg_lattice_pool)
3689 free_alloc_pool (ipcp_agg_lattice_pool);
3690 if (ipa_refdesc_pool)
3691 free_alloc_pool (ipa_refdesc_pool);
3692 }
3693
3694 /* Print the parameter descriptors (ipa_tree_map data structures) of
3695 function NODE to F. */
3696
3697 void
3698 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3699 {
3700 int i, count;
3701 struct ipa_node_params *info;
3702
3703 if (!node->definition)
3704 return;
3705 info = IPA_NODE_REF (node);
3706 fprintf (f, " function %s/%i parameter descriptors:\n",
3707 node->name (), node->order);
3708 count = ipa_get_param_count (info);
3709 for (i = 0; i < count; i++)
3710 {
3711 int c;
3712
3713 fprintf (f, " ");
3714 ipa_dump_param (f, info, i);
3715 if (ipa_is_param_used (info, i))
3716 fprintf (f, " used");
3717 c = ipa_get_controlled_uses (info, i);
3718 if (c == IPA_UNDESCRIBED_USE)
3719 fprintf (f, " undescribed_use");
3720 else
3721 fprintf (f, " controlled_uses=%i", c);
3722 fprintf (f, "\n");
3723 }
3724 }
3725
3726 /* Print ipa_tree_map data structures of all functions in the
3727 callgraph to F. */
3728
3729 void
3730 ipa_print_all_params (FILE * f)
3731 {
3732 struct cgraph_node *node;
3733
3734 fprintf (f, "\nFunction parameters:\n");
3735 FOR_EACH_FUNCTION (node)
3736 ipa_print_node_params (f, node);
3737 }
3738
3739 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3740
3741 vec<tree>
3742 ipa_get_vector_of_formal_parms (tree fndecl)
3743 {
3744 vec<tree> args;
3745 int count;
3746 tree parm;
3747
3748 gcc_assert (!flag_wpa);
3749 count = count_formal_params (fndecl);
3750 args.create (count);
3751 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3752 args.quick_push (parm);
3753
3754 return args;
3755 }
3756
3757 /* Return a heap allocated vector containing types of formal parameters of
3758 function type FNTYPE. */
3759
3760 vec<tree>
3761 ipa_get_vector_of_formal_parm_types (tree fntype)
3762 {
3763 vec<tree> types;
3764 int count = 0;
3765 tree t;
3766
3767 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3768 count++;
3769
3770 types.create (count);
3771 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3772 types.quick_push (TREE_VALUE (t));
3773
3774 return types;
3775 }
3776
3777 /* Modify the function declaration FNDECL and its type according to the plan
3778 in ADJUSTMENTS. Also set the base fields of the individual adjustment
3779 structures to point to the actual parameters being modified, which are
3780 determined by their base_index fields. */
3781
3782 void
3783 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3784 {
3785 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3786 tree orig_type = TREE_TYPE (fndecl);
3787 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3788
3789 /* The following test is an ugly hack; some functions simply do not have any
3790 arguments in their type. This is probably a bug, but well... */
3791 bool care_for_types = (old_arg_types != NULL_TREE);
3792 bool last_parm_void;
3793 vec<tree> otypes;
3794 if (care_for_types)
3795 {
3796 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3797 == void_type_node);
3798 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3799 if (last_parm_void)
3800 gcc_assert (oparms.length () + 1 == otypes.length ());
3801 else
3802 gcc_assert (oparms.length () == otypes.length ());
3803 }
3804 else
3805 {
3806 last_parm_void = false;
3807 otypes.create (0);
3808 }
3809
3810 int len = adjustments.length ();
3811 tree *link = &DECL_ARGUMENTS (fndecl);
3812 tree new_arg_types = NULL;
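  /* Walk the adjustments: copied parameters are re-linked unchanged, removed
     ones are simply skipped, and new or reduced parameters get a freshly
     built PARM_DECL.  */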
3813 for (int i = 0; i < len; i++)
3814 {
3815 struct ipa_parm_adjustment *adj;
3816 gcc_assert (link);
3817
3818 adj = &adjustments[i];
3819 tree parm;
3820 if (adj->op == IPA_PARM_OP_NEW)
3821 parm = NULL;
3822 else
3823 parm = oparms[adj->base_index];
3824 adj->base = parm;
3825
3826 if (adj->op == IPA_PARM_OP_COPY)
3827 {
3828 if (care_for_types)
3829 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3830 new_arg_types);
3831 *link = parm;
3832 link = &DECL_CHAIN (parm);
3833 }
3834 else if (adj->op != IPA_PARM_OP_REMOVE)
3835 {
3836 tree new_parm;
3837 tree ptype;
3838
3839 if (adj->by_ref)
3840 ptype = build_pointer_type (adj->type);
3841 else
3842 {
3843 ptype = adj->type;
3844 if (is_gimple_reg_type (ptype))
3845 {
3846 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3847 if (TYPE_ALIGN (ptype) < malign)
3848 ptype = build_aligned_type (ptype, malign);
3849 }
3850 }
3851
3852 if (care_for_types)
3853 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3854
3855 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3856 ptype);
3857 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3858 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3859 DECL_ARTIFICIAL (new_parm) = 1;
3860 DECL_ARG_TYPE (new_parm) = ptype;
3861 DECL_CONTEXT (new_parm) = fndecl;
3862 TREE_USED (new_parm) = 1;
3863 DECL_IGNORED_P (new_parm) = 1;
3864 layout_decl (new_parm, 0);
3865
3866 if (adj->op == IPA_PARM_OP_NEW)
3867 adj->base = NULL;
3868 else
3869 adj->base = parm;
3870 adj->new_decl = new_parm;
3871
3872 *link = new_parm;
3873 link = &DECL_CHAIN (new_parm);
3874 }
3875 }
3876
3877 *link = NULL_TREE;
3878
3879 tree new_reversed = NULL;
3880 if (care_for_types)
3881 {
3882 new_reversed = nreverse (new_arg_types);
3883 if (last_parm_void)
3884 {
3885 if (new_reversed)
3886 TREE_CHAIN (new_arg_types) = void_list_node;
3887 else
3888 new_reversed = void_list_node;
3889 }
3890 }
3891
3892 /* Use copy_node to preserve as much as possible from the original type
3893 (debug info, attribute lists, etc.).
3894 The exception is that METHOD_TYPEs must have a THIS argument; when we are
3895 asked to remove it, we need to build a new FUNCTION_TYPE
3896 instead. */
3897 tree new_type = NULL;
3898 if (TREE_CODE (orig_type) != METHOD_TYPE
3899 || (adjustments[0].op == IPA_PARM_OP_COPY
3900 && adjustments[0].base_index == 0))
3901 {
3902 new_type = build_distinct_type_copy (orig_type);
3903 TYPE_ARG_TYPES (new_type) = new_reversed;
3904 }
3905 else
3906 {
3907 new_type
3908 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3909 new_reversed));
3910 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3911 DECL_VINDEX (fndecl) = NULL_TREE;
3912 }
3913
3914 /* When signature changes, we need to clear builtin info. */
3915 if (DECL_BUILT_IN (fndecl))
3916 {
3917 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3918 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3919 }
3920
3921 TREE_TYPE (fndecl) = new_type;
3922 DECL_VIRTUAL_P (fndecl) = 0;
3923 DECL_LANG_SPECIFIC (fndecl) = NULL;
3924 otypes.release ();
3925 oparms.release ();
3926 }
3927
3928 /* Modify the actual arguments of the call statement STMT as indicated in
3929 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
3930 Otherwise it must contain the call graph edge corresponding to STMT. */
3931
3932 void
3933 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3934 ipa_parm_adjustment_vec adjustments)
3935 {
3936 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3937 vec<tree> vargs;
3938 vec<tree, va_gc> **debug_args = NULL;
3939 gcall *new_stmt;
3940 gimple_stmt_iterator gsi, prev_gsi;
3941 tree callee_decl;
3942 int i, len;
3943
3944 len = adjustments.length ();
3945 vargs.create (len);
3946 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3947 current_node->remove_stmt_references (stmt);
3948
3949 gsi = gsi_for_stmt (stmt);
3950 prev_gsi = gsi;
3951 gsi_prev (&prev_gsi);
3952 for (i = 0; i < len; i++)
3953 {
3954 struct ipa_parm_adjustment *adj;
3955
3956 adj = &adjustments[i];
3957
3958 if (adj->op == IPA_PARM_OP_COPY)
3959 {
3960 tree arg = gimple_call_arg (stmt, adj->base_index);
3961
3962 vargs.quick_push (arg);
3963 }
3964 else if (adj->op != IPA_PARM_OP_REMOVE)
3965 {
3966 tree expr, base, off;
3967 location_t loc;
3968 unsigned int deref_align = 0;
3969 bool deref_base = false;
3970
3971 /* We create a new parameter out of the value of the old one, we can
3972 do the following kind of transformations:
3973
3974 - A scalar passed by reference is converted to a scalar passed by
3975 value. (adj->by_ref is false and the type of the original
3976 actual argument is a pointer to a scalar).
3977
3978 - A part of an aggregate is passed instead of the whole aggregate.
3979 The part can be passed either by value or by reference, this is
3980 determined by value of adj->by_ref. Moreover, the code below
3981 handles both situations when the original aggregate is passed by
3982 value (its type is not a pointer) and when it is passed by
3983 reference (it is a pointer to an aggregate).
3984
3985 When the new argument is passed by reference (adj->by_ref is true)
3986 it must be a part of an aggregate and therefore we form it by
3987 simply taking the address of a reference inside the original
3988 aggregate. */
3989
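	  /* A hypothetical example: if only the member at byte offset 4 of a
	     structure passed by pointer P is actually used, the code below
	     rewrites the argument into the equivalent of MEM_REF <P, 4> when
	     the new parameter is passed by value, or &MEM_REF <P, 4> when it
	     is passed by reference.  */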
3990 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3991 base = gimple_call_arg (stmt, adj->base_index);
3992 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3993 : EXPR_LOCATION (base);
3994
3995 if (TREE_CODE (base) != ADDR_EXPR
3996 && POINTER_TYPE_P (TREE_TYPE (base)))
3997 off = build_int_cst (adj->alias_ptr_type,
3998 adj->offset / BITS_PER_UNIT);
3999 else
4000 {
4001 HOST_WIDE_INT base_offset;
4002 tree prev_base;
4003 bool addrof;
4004
4005 if (TREE_CODE (base) == ADDR_EXPR)
4006 {
4007 base = TREE_OPERAND (base, 0);
4008 addrof = true;
4009 }
4010 else
4011 addrof = false;
4012 prev_base = base;
4013 base = get_addr_base_and_unit_offset (base, &base_offset);
4014 /* Aggregate arguments can have non-invariant addresses. */
4015 if (!base)
4016 {
4017 base = build_fold_addr_expr (prev_base);
4018 off = build_int_cst (adj->alias_ptr_type,
4019 adj->offset / BITS_PER_UNIT);
4020 }
4021 else if (TREE_CODE (base) == MEM_REF)
4022 {
4023 if (!addrof)
4024 {
4025 deref_base = true;
4026 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4027 }
4028 off = build_int_cst (adj->alias_ptr_type,
4029 base_offset
4030 + adj->offset / BITS_PER_UNIT);
4031 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4032 off);
4033 base = TREE_OPERAND (base, 0);
4034 }
4035 else
4036 {
4037 off = build_int_cst (adj->alias_ptr_type,
4038 base_offset
4039 + adj->offset / BITS_PER_UNIT);
4040 base = build_fold_addr_expr (base);
4041 }
4042 }
4043
4044 if (!adj->by_ref)
4045 {
4046 tree type = adj->type;
4047 unsigned int align;
4048 unsigned HOST_WIDE_INT misalign;
4049
4050 if (deref_base)
4051 {
4052 align = deref_align;
4053 misalign = 0;
4054 }
4055 else
4056 {
4057 get_pointer_alignment_1 (base, &align, &misalign);
4058 if (TYPE_ALIGN (type) > align)
4059 align = TYPE_ALIGN (type);
4060 }
4061 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4062 * BITS_PER_UNIT);
4063 misalign = misalign & (align - 1);
4064 if (misalign != 0)
4065 align = (misalign & -misalign);
4066 if (align < TYPE_ALIGN (type))
4067 type = build_aligned_type (type, align);
4068 base = force_gimple_operand_gsi (&gsi, base,
4069 true, NULL, true, GSI_SAME_STMT);
4070 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4071 /* If expr is not a valid gimple call argument, emit
4072 a load into a temporary. */
4073 if (is_gimple_reg_type (TREE_TYPE (expr)))
4074 {
4075 gimple tem = gimple_build_assign (NULL_TREE, expr);
4076 if (gimple_in_ssa_p (cfun))
4077 {
4078 gimple_set_vuse (tem, gimple_vuse (stmt));
4079 expr = make_ssa_name (TREE_TYPE (expr), tem);
4080 }
4081 else
4082 expr = create_tmp_reg (TREE_TYPE (expr));
4083 gimple_assign_set_lhs (tem, expr);
4084 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4085 }
4086 }
4087 else
4088 {
4089 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4090 expr = build_fold_addr_expr (expr);
4091 expr = force_gimple_operand_gsi (&gsi, expr,
4092 true, NULL, true, GSI_SAME_STMT);
4093 }
4094 vargs.quick_push (expr);
4095 }
4096 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4097 {
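	  /* Preserve debug information for the removed or transformed argument
	     by binding a DEBUG_EXPR_DECL representing the original parameter
	     to the value of the actual argument.  */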
4098 unsigned int ix;
4099 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4100 gimple def_temp;
4101
4102 arg = gimple_call_arg (stmt, adj->base_index);
4103 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4104 {
4105 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4106 continue;
4107 arg = fold_convert_loc (gimple_location (stmt),
4108 TREE_TYPE (origin), arg);
4109 }
4110 if (debug_args == NULL)
4111 debug_args = decl_debug_args_insert (callee_decl);
4112 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4113 if (ddecl == origin)
4114 {
4115 ddecl = (**debug_args)[ix + 1];
4116 break;
4117 }
4118 if (ddecl == NULL)
4119 {
4120 ddecl = make_node (DEBUG_EXPR_DECL);
4121 DECL_ARTIFICIAL (ddecl) = 1;
4122 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4123 DECL_MODE (ddecl) = DECL_MODE (origin);
4124
4125 vec_safe_push (*debug_args, origin);
4126 vec_safe_push (*debug_args, ddecl);
4127 }
4128 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4129 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4130 }
4131 }
4132
4133 if (dump_file && (dump_flags & TDF_DETAILS))
4134 {
4135 fprintf (dump_file, "replacing stmt:");
4136 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4137 }
4138
4139 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4140 vargs.release ();
4141 if (gimple_call_lhs (stmt))
4142 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4143
4144 gimple_set_block (new_stmt, gimple_block (stmt));
4145 if (gimple_has_location (stmt))
4146 gimple_set_location (new_stmt, gimple_location (stmt));
4147 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4148 gimple_call_copy_flags (new_stmt, stmt);
4149 if (gimple_in_ssa_p (cfun))
4150 {
4151 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4152 if (gimple_vdef (stmt))
4153 {
4154 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4155 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4156 }
4157 }
4158
4159 if (dump_file && (dump_flags & TDF_DETAILS))
4160 {
4161 fprintf (dump_file, "with stmt:");
4162 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4163 fprintf (dump_file, "\n");
4164 }
4165 gsi_replace (&gsi, new_stmt, true);
4166 if (cs)
4167 cs->set_call_stmt (new_stmt);
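  /* Re-record symbol references for the new call and any statements emitted
     before it, walking backwards to the statement that originally preceded
     the call.  */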
4168 do
4169 {
4170 current_node->record_stmt_references (gsi_stmt (gsi));
4171 gsi_prev (&gsi);
4172 }
4173 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4174 }
4175
4176 /* If the expression *EXPR should be replaced by a reduction of a parameter,
4177 do so. ADJUSTMENTS is a vector of adjustments. CONVERT specifies whether
4178 the function should care about type incompatibility between the current
4179 and new expressions. If it is false, the function will leave
4180 incompatibility issues to the caller. Return true iff the expression
4181 was modified. */
4182
4183 bool
4184 ipa_modify_expr (tree *expr, bool convert,
4185 ipa_parm_adjustment_vec adjustments)
4186 {
4187 struct ipa_parm_adjustment *cand
4188 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4189 if (!cand)
4190 return false;
4191
4192 tree src;
4193 if (cand->by_ref)
4194 src = build_simple_mem_ref (cand->new_decl);
4195 else
4196 src = cand->new_decl;
4197
4198 if (dump_file && (dump_flags & TDF_DETAILS))
4199 {
4200 fprintf (dump_file, "About to replace expr ");
4201 print_generic_expr (dump_file, *expr, 0);
4202 fprintf (dump_file, " with ");
4203 print_generic_expr (dump_file, src, 0);
4204 fprintf (dump_file, "\n");
4205 }
4206
4207 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4208 {
4209 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4210 *expr = vce;
4211 }
4212 else
4213 *expr = src;
4214 return true;
4215 }
4216
4217 /* If T is an SSA_NAME, return NULL if it is not a default def or
4218 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4219 the base variable is always returned, regardless of whether it is a default
4220 def. Return T if it is not an SSA_NAME. */
4221
4222 static tree
4223 get_ssa_base_param (tree t, bool ignore_default_def)
4224 {
4225 if (TREE_CODE (t) == SSA_NAME)
4226 {
4227 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4228 return SSA_NAME_VAR (t);
4229 else
4230 return NULL_TREE;
4231 }
4232 return t;
4233 }
4234
4235 /* Given an expression, return an adjustment entry specifying the
4236 transformation to be done on EXPR. If no suitable adjustment entry
4237 was found, returns NULL.
4238
4239 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4240 default def, otherwise bail on them.
4241
4242 If CONVERT is non-NULL, this function will set *CONVERT if the
4243 expression provided is a component reference. ADJUSTMENTS is the
4244 adjustments vector. */
4245
4246 ipa_parm_adjustment *
4247 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4248 ipa_parm_adjustment_vec adjustments,
4249 bool ignore_default_def)
4250 {
4251 if (TREE_CODE (**expr) == BIT_FIELD_REF
4252 || TREE_CODE (**expr) == IMAGPART_EXPR
4253 || TREE_CODE (**expr) == REALPART_EXPR)
4254 {
4255 *expr = &TREE_OPERAND (**expr, 0);
4256 if (convert)
4257 *convert = true;
4258 }
4259
4260 HOST_WIDE_INT offset, size, max_size;
4261 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4262 if (!base || size == -1 || max_size == -1)
4263 return NULL;
4264
4265 if (TREE_CODE (base) == MEM_REF)
4266 {
4267 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4268 base = TREE_OPERAND (base, 0);
4269 }
4270
4271 base = get_ssa_base_param (base, ignore_default_def);
4272 if (!base || TREE_CODE (base) != PARM_DECL)
4273 return NULL;
4274
4275 struct ipa_parm_adjustment *cand = NULL;
4276 unsigned int len = adjustments.length ();
4277 for (unsigned i = 0; i < len; i++)
4278 {
4279 struct ipa_parm_adjustment *adj = &adjustments[i];
4280
4281 if (adj->base == base
4282 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4283 {
4284 cand = adj;
4285 break;
4286 }
4287 }
4288
4289 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4290 return NULL;
4291 return cand;
4292 }
4293
4294 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4295
4296 static bool
4297 index_in_adjustments_multiple_times_p (int base_index,
4298 ipa_parm_adjustment_vec adjustments)
4299 {
4300 int i, len = adjustments.length ();
4301 bool one = false;
4302
4303 for (i = 0; i < len; i++)
4304 {
4305 struct ipa_parm_adjustment *adj;
4306 adj = &adjustments[i];
4307
4308 if (adj->base_index == base_index)
4309 {
4310 if (one)
4311 return true;
4312 else
4313 one = true;
4314 }
4315 }
4316 return false;
4317 }
4318
4319
4320 /* Return adjustments that should have the same effect on function parameters
4321 and call arguments as if they were first changed according to adjustments in
4322 INNER and then by adjustments in OUTER. */
4323
4324 ipa_parm_adjustment_vec
4325 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4326 ipa_parm_adjustment_vec outer)
4327 {
4328 int i, outlen = outer.length ();
4329 int inlen = inner.length ();
4330 int removals = 0;
4331 ipa_parm_adjustment_vec adjustments, tmp;
4332
4333 tmp.create (inlen);
4334 for (i = 0; i < inlen; i++)
4335 {
4336 struct ipa_parm_adjustment *n;
4337 n = &inner[i];
4338
4339 if (n->op == IPA_PARM_OP_REMOVE)
4340 removals++;
4341 else
4342 {
4343 /* FIXME: Handling of new arguments is not implemented yet. */
4344 gcc_assert (n->op != IPA_PARM_OP_NEW);
4345 tmp.quick_push (*n);
4346 }
4347 }
4348
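  /* Compose each outer adjustment with the inner adjustment it refers to;
     offsets of successive reductions accumulate.  */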
4349 adjustments.create (outlen + removals);
4350 for (i = 0; i < outlen; i++)
4351 {
4352 struct ipa_parm_adjustment r;
4353 struct ipa_parm_adjustment *out = &outer[i];
4354 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4355
4356 memset (&r, 0, sizeof (r));
4357 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4358 if (out->op == IPA_PARM_OP_REMOVE)
4359 {
4360 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4361 {
4362 r.op = IPA_PARM_OP_REMOVE;
4363 adjustments.quick_push (r);
4364 }
4365 continue;
4366 }
4367 else
4368 {
4369 /* FIXME: Handling of new arguments is not implemented yet. */
4370 gcc_assert (out->op != IPA_PARM_OP_NEW);
4371 }
4372
4373 r.base_index = in->base_index;
4374 r.type = out->type;
4375
4376 /* FIXME: Create nonlocal value too. */
4377
4378 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4379 r.op = IPA_PARM_OP_COPY;
4380 else if (in->op == IPA_PARM_OP_COPY)
4381 r.offset = out->offset;
4382 else if (out->op == IPA_PARM_OP_COPY)
4383 r.offset = in->offset;
4384 else
4385 r.offset = in->offset + out->offset;
4386 adjustments.quick_push (r);
4387 }
4388
4389 for (i = 0; i < inlen; i++)
4390 {
4391 struct ipa_parm_adjustment *n = &inner[i];
4392
4393 if (n->op == IPA_PARM_OP_REMOVE)
4394 adjustments.quick_push (*n);
4395 }
4396
4397 tmp.release ();
4398 return adjustments;
4399 }
4400
4401 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4402 way, assuming they are meant to be applied to FNDECL. */
4403
4404 void
4405 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4406 tree fndecl)
4407 {
4408 int i, len = adjustments.length ();
4409 bool first = true;
4410 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4411
4412 fprintf (file, "IPA param adjustments: ");
4413 for (i = 0; i < len; i++)
4414 {
4415 struct ipa_parm_adjustment *adj;
4416 adj = &adjustments[i];
4417
4418 if (!first)
4419 fprintf (file, " ");
4420 else
4421 first = false;
4422
4423 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4424 print_generic_expr (file, parms[adj->base_index], 0);
4425 if (adj->base)
4426 {
4427 fprintf (file, ", base: ");
4428 print_generic_expr (file, adj->base, 0);
4429 }
4430 if (adj->new_decl)
4431 {
4432 fprintf (file, ", new_decl: ");
4433 print_generic_expr (file, adj->new_decl, 0);
4434 }
4435 if (adj->new_ssa_base)
4436 {
4437 fprintf (file, ", new_ssa_base: ");
4438 print_generic_expr (file, adj->new_ssa_base, 0);
4439 }
4440
4441 if (adj->op == IPA_PARM_OP_COPY)
4442 fprintf (file, ", copy_param");
4443 else if (adj->op == IPA_PARM_OP_REMOVE)
4444 fprintf (file, ", remove_param");
4445 else
4446 fprintf (file, ", offset %li", (long) adj->offset);
4447 if (adj->by_ref)
4448 fprintf (file, ", by_ref");
4449 print_node_brief (file, ", type: ", adj->type, 0);
4450 fprintf (file, "\n");
4451 }
4452 parms.release ();
4453 }
4454
4455 /* Dump the linked list of aggregate replacement values AV to F. */
4456
4457 void
4458 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4459 {
4460 bool comma = false;
4461 fprintf (f, " Aggregate replacements:");
4462 for (; av; av = av->next)
4463 {
4464 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4465 av->index, av->offset);
4466 print_generic_expr (f, av->value, 0);
4467 comma = true;
4468 }
4469 fprintf (f, "\n");
4470 }
4471
4472 /* Stream out jump function JUMP_FUNC to OB. */
4473
4474 static void
4475 ipa_write_jump_function (struct output_block *ob,
4476 struct ipa_jump_func *jump_func)
4477 {
4478 struct ipa_agg_jf_item *item;
4479 struct bitpack_d bp;
4480 int i, count;
4481
4482 streamer_write_uhwi (ob, jump_func->type);
4483 switch (jump_func->type)
4484 {
4485 case IPA_JF_UNKNOWN:
4486 break;
4487 case IPA_JF_CONST:
4488 gcc_assert (
4489 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4490 stream_write_tree (ob, jump_func->value.constant.value, true);
4491 break;
4492 case IPA_JF_PASS_THROUGH:
4493 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4494 if (jump_func->value.pass_through.operation == NOP_EXPR)
4495 {
4496 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4497 bp = bitpack_create (ob->main_stream);
4498 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4499 streamer_write_bitpack (&bp);
4500 }
4501 else
4502 {
4503 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4504 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4505 }
4506 break;
4507 case IPA_JF_ANCESTOR:
4508 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4509 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4510 bp = bitpack_create (ob->main_stream);
4511 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4512 streamer_write_bitpack (&bp);
4513 break;
4514 }
4515
4516 count = vec_safe_length (jump_func->agg.items);
4517 streamer_write_uhwi (ob, count);
4518 if (count)
4519 {
4520 bp = bitpack_create (ob->main_stream);
4521 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4522 streamer_write_bitpack (&bp);
4523 }
4524
4525 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4526 {
4527 streamer_write_uhwi (ob, item->offset);
4528 stream_write_tree (ob, item->value, true);
4529 }
4530
4531 bp = bitpack_create (ob->main_stream);
4532 bp_pack_value (&bp, jump_func->alignment.known, 1);
4533 streamer_write_bitpack (&bp);
4534 if (jump_func->alignment.known)
4535 {
4536 streamer_write_uhwi (ob, jump_func->alignment.align);
4537 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4538 }
4539 }
4540
4541 /* Read in jump function JUMP_FUNC from IB. */
4542
4543 static void
4544 ipa_read_jump_function (struct lto_input_block *ib,
4545 struct ipa_jump_func *jump_func,
4546 struct cgraph_edge *cs,
4547 struct data_in *data_in)
4548 {
4549 enum jump_func_type jftype;
4550 enum tree_code operation;
4551 int i, count;
4552
4553 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4554 switch (jftype)
4555 {
4556 case IPA_JF_UNKNOWN:
4557 ipa_set_jf_unknown (jump_func);
4558 break;
4559 case IPA_JF_CONST:
4560 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4561 break;
4562 case IPA_JF_PASS_THROUGH:
4563 operation = (enum tree_code) streamer_read_uhwi (ib);
4564 if (operation == NOP_EXPR)
4565 {
4566 int formal_id = streamer_read_uhwi (ib);
4567 struct bitpack_d bp = streamer_read_bitpack (ib);
4568 bool agg_preserved = bp_unpack_value (&bp, 1);
4569 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4570 }
4571 else
4572 {
4573 tree operand = stream_read_tree (ib, data_in);
4574 int formal_id = streamer_read_uhwi (ib);
4575 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4576 operation);
4577 }
4578 break;
4579 case IPA_JF_ANCESTOR:
4580 {
4581 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4582 int formal_id = streamer_read_uhwi (ib);
4583 struct bitpack_d bp = streamer_read_bitpack (ib);
4584 bool agg_preserved = bp_unpack_value (&bp, 1);
4585 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4586 break;
4587 }
4588 }
4589
4590 count = streamer_read_uhwi (ib);
4591 vec_alloc (jump_func->agg.items, count);
4592 if (count)
4593 {
4594 struct bitpack_d bp = streamer_read_bitpack (ib);
4595 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4596 }
4597 for (i = 0; i < count; i++)
4598 {
4599 struct ipa_agg_jf_item item;
4600 item.offset = streamer_read_uhwi (ib);
4601 item.value = stream_read_tree (ib, data_in);
4602 jump_func->agg.items->quick_push (item);
4603 }
4604
4605 struct bitpack_d bp = streamer_read_bitpack (ib);
4606 bool alignment_known = bp_unpack_value (&bp, 1);
4607 if (alignment_known)
4608 {
4609 jump_func->alignment.known = true;
4610 jump_func->alignment.align = streamer_read_uhwi (ib);
4611 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4612 }
4613 else
4614 jump_func->alignment.known = false;
4615 }
4616
4617 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4618 relevant to indirect inlining to OB. */
4619
4620 static void
4621 ipa_write_indirect_edge_info (struct output_block *ob,
4622 struct cgraph_edge *cs)
4623 {
4624 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4625 struct bitpack_d bp;
4626
4627 streamer_write_hwi (ob, ii->param_index);
4628 bp = bitpack_create (ob->main_stream);
4629 bp_pack_value (&bp, ii->polymorphic, 1);
4630 bp_pack_value (&bp, ii->agg_contents, 1);
4631 bp_pack_value (&bp, ii->member_ptr, 1);
4632 bp_pack_value (&bp, ii->by_ref, 1);
4633 bp_pack_value (&bp, ii->vptr_changed, 1);
4634 streamer_write_bitpack (&bp);
4635 if (ii->agg_contents || ii->polymorphic)
4636 streamer_write_hwi (ob, ii->offset);
4637 else
4638 gcc_assert (ii->offset == 0);
4639
4640 if (ii->polymorphic)
4641 {
4642 streamer_write_hwi (ob, ii->otr_token);
4643 stream_write_tree (ob, ii->otr_type, true);
4644 ii->context.stream_out (ob);
4645 }
4646 }
4647
4648 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4649 relevant to indirect inlining from IB. */
4650
4651 static void
4652 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4653 struct data_in *data_in,
4654 struct cgraph_edge *cs)
4655 {
4656 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4657 struct bitpack_d bp;
4658
4659 ii->param_index = (int) streamer_read_hwi (ib);
4660 bp = streamer_read_bitpack (ib);
4661 ii->polymorphic = bp_unpack_value (&bp, 1);
4662 ii->agg_contents = bp_unpack_value (&bp, 1);
4663 ii->member_ptr = bp_unpack_value (&bp, 1);
4664 ii->by_ref = bp_unpack_value (&bp, 1);
4665 ii->vptr_changed = bp_unpack_value (&bp, 1);
4666 if (ii->agg_contents || ii->polymorphic)
4667 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4668 else
4669 ii->offset = 0;
4670 if (ii->polymorphic)
4671 {
4672 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4673 ii->otr_type = stream_read_tree (ib, data_in);
4674 ii->context.stream_in (ib, data_in);
4675 }
4676 }
4677
4678 /* Stream out NODE info to OB. */
4679
4680 static void
4681 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4682 {
4683 int node_ref;
4684 lto_symtab_encoder_t encoder;
4685 struct ipa_node_params *info = IPA_NODE_REF (node);
4686 int j;
4687 struct cgraph_edge *e;
4688 struct bitpack_d bp;
4689
4690 encoder = ob->decl_state->symtab_node_encoder;
4691 node_ref = lto_symtab_encoder_encode (encoder, node);
4692 streamer_write_uhwi (ob, node_ref);
4693
4694 streamer_write_uhwi (ob, ipa_get_param_count (info));
4695 for (j = 0; j < ipa_get_param_count (info); j++)
4696 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4697 bp = bitpack_create (ob->main_stream);
4698 gcc_assert (info->analysis_done
4699 || ipa_get_param_count (info) == 0);
4700 gcc_assert (!info->node_enqueued);
4701 gcc_assert (!info->ipcp_orig_node);
4702 for (j = 0; j < ipa_get_param_count (info); j++)
4703 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4704 streamer_write_bitpack (&bp);
4705 for (j = 0; j < ipa_get_param_count (info); j++)
4706 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4707 for (e = node->callees; e; e = e->next_callee)
4708 {
4709 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4710
4711 streamer_write_uhwi (ob,
4712 ipa_get_cs_argument_count (args) * 2
4713 + (args->polymorphic_call_contexts != NULL));
4714 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4715 {
4716 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4717 if (args->polymorphic_call_contexts != NULL)
4718 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4719 }
4720 }
4721 for (e = node->indirect_calls; e; e = e->next_callee)
4722 {
4723 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4724
4725 streamer_write_uhwi (ob,
4726 ipa_get_cs_argument_count (args) * 2
4727 + (args->polymorphic_call_contexts != NULL));
4728 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4729 {
4730 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4731 if (args->polymorphic_call_contexts != NULL)
4732 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4733 }
4734 ipa_write_indirect_edge_info (ob, e);
4735 }
4736 }
4737
4738 /* Stream in NODE info from IB. */
4739
4740 static void
4741 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4742 struct data_in *data_in)
4743 {
4744 struct ipa_node_params *info = IPA_NODE_REF (node);
4745 int k;
4746 struct cgraph_edge *e;
4747 struct bitpack_d bp;
4748
4749 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4750
4751 for (k = 0; k < ipa_get_param_count (info); k++)
4752 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4753
4754 bp = streamer_read_bitpack (ib);
4755 if (ipa_get_param_count (info) != 0)
4756 info->analysis_done = true;
4757 info->node_enqueued = false;
4758 for (k = 0; k < ipa_get_param_count (info); k++)
4759 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4760 for (k = 0; k < ipa_get_param_count (info); k++)
4761 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4762 for (e = node->callees; e; e = e->next_callee)
4763 {
4764 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4765 int count = streamer_read_uhwi (ib);
4766 bool contexts_computed = count & 1;
4767 count /= 2;
4768
4769 if (!count)
4770 continue;
4771 vec_safe_grow_cleared (args->jump_functions, count);
4772 if (contexts_computed)
4773 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4774
4775 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4776 {
4777 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4778 data_in);
4779 if (contexts_computed)
4780 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4781 }
4782 }
4783 for (e = node->indirect_calls; e; e = e->next_callee)
4784 {
4785 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4786 int count = streamer_read_uhwi (ib);
4787 bool contexts_computed = count & 1;
4788 count /= 2;
4789
4790 if (count)
4791 {
4792 vec_safe_grow_cleared (args->jump_functions, count);
4793 if (contexts_computed)
4794 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4795 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4796 {
4797 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4798 data_in);
4799 if (contexts_computed)
4800 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4801 }
4802 }
4803 ipa_read_indirect_edge_info (ib, data_in, e);
4804 }
4805 }
4806
4807 /* Write jump functions for all analyzed functions in the current LTO partition. */
4808
4809 void
4810 ipa_prop_write_jump_functions (void)
4811 {
4812 struct cgraph_node *node;
4813 struct output_block *ob;
4814 unsigned int count = 0;
4815 lto_symtab_encoder_iterator lsei;
4816 lto_symtab_encoder_t encoder;
4817
4818 if (!ipa_node_params_sum)
4819 return;
4820
4821 ob = create_output_block (LTO_section_jump_functions);
4822 encoder = ob->decl_state->symtab_node_encoder;
4823 ob->symbol = NULL;
4824 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4825 lsei_next_function_in_partition (&lsei))
4826 {
4827 node = lsei_cgraph_node (lsei);
4828 if (node->has_gimple_body_p ()
4829 && IPA_NODE_REF (node) != NULL)
4830 count++;
4831 }
4832
4833 streamer_write_uhwi (ob, count);
4834
4835 /* Process all of the functions. */
4836 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4837 lsei_next_function_in_partition (&lsei))
4838 {
4839 node = lsei_cgraph_node (lsei);
4840 if (node->has_gimple_body_p ()
4841 && IPA_NODE_REF (node) != NULL)
4842 ipa_write_node_info (ob, node);
4843 }
4844 streamer_write_char_stream (ob->main_stream, 0);
4845 produce_asm (ob, NULL);
4846 destroy_output_block (ob);
4847 }
4848
4849 /* Read section in file FILE_DATA of length LEN with data DATA. */
4850
4851 static void
4852 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4853 size_t len)
4854 {
4855 const struct lto_function_header *header =
4856 (const struct lto_function_header *) data;
4857 const int cfg_offset = sizeof (struct lto_function_header);
4858 const int main_offset = cfg_offset + header->cfg_size;
4859 const int string_offset = main_offset + header->main_size;
4860 struct data_in *data_in;
4861 unsigned int i;
4862 unsigned int count;
4863
4864 lto_input_block ib_main ((const char *) data + main_offset,
4865 header->main_size);
4866
4867 data_in =
4868 lto_data_in_create (file_data, (const char *) data + string_offset,
4869 header->string_size, vNULL);
4870 count = streamer_read_uhwi (&ib_main);
4871
4872 for (i = 0; i < count; i++)
4873 {
4874 unsigned int index;
4875 struct cgraph_node *node;
4876 lto_symtab_encoder_t encoder;
4877
4878 index = streamer_read_uhwi (&ib_main);
4879 encoder = file_data->symtab_node_encoder;
4880 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4881 index));
4882 gcc_assert (node->definition);
4883 ipa_read_node_info (&ib_main, node, data_in);
4884 }
4885 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4886 len);
4887 lto_data_in_delete (data_in);
4888 }
4889
4890 /* Read ipcp jump functions. */
4891
4892 void
4893 ipa_prop_read_jump_functions (void)
4894 {
4895 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4896 struct lto_file_decl_data *file_data;
4897 unsigned int j = 0;
4898
4899 ipa_check_create_node_params ();
4900 ipa_check_create_edge_args ();
4901 ipa_register_cgraph_hooks ();
4902
4903 while ((file_data = file_data_vec[j++]))
4904 {
4905 size_t len;
4906 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4907
4908 if (data)
4909 ipa_prop_read_section (file_data, data, len);
4910 }
4911 }
4912
4913 /* After merging units, we can get mismatches in argument counts.
4914 Also, decl merging might have rendered parameter lists obsolete.
4915 Also compute called_with_variable_arg info. */
4916
4917 void
4918 ipa_update_after_lto_read (void)
4919 {
4920 ipa_check_create_node_params ();
4921 ipa_check_create_edge_args ();
4922 }
4923
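/* Stream out the aggregate value replacement chain and any known parameter
   alignment information for NODE to OB.  */
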
4924 void
4925 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4926 {
4927 int node_ref;
4928 unsigned int count = 0;
4929 lto_symtab_encoder_t encoder;
4930 struct ipa_agg_replacement_value *aggvals, *av;
4931
4932 aggvals = ipa_get_agg_replacements_for_node (node);
4933 encoder = ob->decl_state->symtab_node_encoder;
4934 node_ref = lto_symtab_encoder_encode (encoder, node);
4935 streamer_write_uhwi (ob, node_ref);
4936
4937 for (av = aggvals; av; av = av->next)
4938 count++;
4939 streamer_write_uhwi (ob, count);
4940
4941 for (av = aggvals; av; av = av->next)
4942 {
4943 struct bitpack_d bp;
4944
4945 streamer_write_uhwi (ob, av->offset);
4946 streamer_write_uhwi (ob, av->index);
4947 stream_write_tree (ob, av->value, true);
4948
4949 bp = bitpack_create (ob->main_stream);
4950 bp_pack_value (&bp, av->by_ref, 1);
4951 streamer_write_bitpack (&bp);
4952 }
4953
4954 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4955 if (ts && vec_safe_length (ts->alignments) > 0)
4956 {
4957 count = ts->alignments->length ();
4958
4959 streamer_write_uhwi (ob, count);
4960 for (unsigned i = 0; i < count; ++i)
4961 {
4962 ipa_alignment *parm_al = &(*ts->alignments)[i];
4963
4964 struct bitpack_d bp;
4965 bp = bitpack_create (ob->main_stream);
4966 bp_pack_value (&bp, parm_al->known, 1);
4967 streamer_write_bitpack (&bp);
4968 if (parm_al->known)
4969 {
4970 streamer_write_uhwi (ob, parm_al->align);
4971 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4972 parm_al->misalign);
4973 }
4974 }
4975 }
4976 else
4977 streamer_write_uhwi (ob, 0);
4978 }
4979
4980 /* Stream in the aggregate value replacement chain for NODE from IB. */
4981
4982 static void
4983 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4984 data_in *data_in)
4985 {
4986 struct ipa_agg_replacement_value *aggvals = NULL;
4987 unsigned int count, i;
4988
4989 count = streamer_read_uhwi (ib);
4990 for (i = 0; i < count; i++)
4991 {
4992 struct ipa_agg_replacement_value *av;
4993 struct bitpack_d bp;
4994
4995 av = ggc_alloc<ipa_agg_replacement_value> ();
4996 av->offset = streamer_read_uhwi (ib);
4997 av->index = streamer_read_uhwi (ib);
4998 av->value = stream_read_tree (ib, data_in);
4999 bp = streamer_read_bitpack (ib);
5000 av->by_ref = bp_unpack_value (&bp, 1);
5001 av->next = aggvals;
5002 aggvals = av;
5003 }
5004 ipa_set_node_agg_value_chain (node, aggvals);
5005
5006 count = streamer_read_uhwi (ib);
5007 if (count > 0)
5008 {
5009 ipcp_grow_transformations_if_necessary ();
5010
5011 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5012 vec_safe_grow_cleared (ts->alignments, count);
5013
5014 for (i = 0; i < count; i++)
5015 {
5016 ipa_alignment *parm_al;
5017 parm_al = &(*ts->alignments)[i];
5018 struct bitpack_d bp;
5019 bp = streamer_read_bitpack (ib);
5020 parm_al->known = bp_unpack_value (&bp, 1);
5021 if (parm_al->known)
5022 {
5023 parm_al->align = streamer_read_uhwi (ib);
5024 parm_al->misalign
5025 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5026 0, parm_al->align);
5027 }
5028 }
5029 }
5030 }
5031
5032 /* Write transformation summaries for all nodes in the current LTO partition. */
5033
5034 void
5035 ipcp_write_transformation_summaries (void)
5036 {
5037 struct cgraph_node *node;
5038 struct output_block *ob;
5039 unsigned int count = 0;
5040 lto_symtab_encoder_iterator lsei;
5041 lto_symtab_encoder_t encoder;
5042
5043 ob = create_output_block (LTO_section_ipcp_transform);
5044 encoder = ob->decl_state->symtab_node_encoder;
5045 ob->symbol = NULL;
5046 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5047 lsei_next_function_in_partition (&lsei))
5048 {
5049 node = lsei_cgraph_node (lsei);
5050 if (node->has_gimple_body_p ())
5051 count++;
5052 }
5053
5054 streamer_write_uhwi (ob, count);
5055
5056 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5057 lsei_next_function_in_partition (&lsei))
5058 {
5059 node = lsei_cgraph_node (lsei);
5060 if (node->has_gimple_body_p ())
5061 write_ipcp_transformation_info (ob, node);
5062 }
5063 streamer_write_char_stream (ob->main_stream, 0);
5064 produce_asm (ob, NULL);
5065 destroy_output_block (ob);
5066 }
5067
5068 /* Read replacements section in file FILE_DATA of length LEN with data
5069 DATA. */
5070
5071 static void
5072 read_replacements_section (struct lto_file_decl_data *file_data,
5073 const char *data,
5074 size_t len)
5075 {
5076 const struct lto_function_header *header =
5077 (const struct lto_function_header *) data;
5078 const int cfg_offset = sizeof (struct lto_function_header);
5079 const int main_offset = cfg_offset + header->cfg_size;
5080 const int string_offset = main_offset + header->main_size;
5081 struct data_in *data_in;
5082 unsigned int i;
5083 unsigned int count;
5084
5085 lto_input_block ib_main ((const char *) data + main_offset,
5086 header->main_size);
5087
5088 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5089 header->string_size, vNULL);
5090 count = streamer_read_uhwi (&ib_main);
5091
5092 for (i = 0; i < count; i++)
5093 {
5094 unsigned int index;
5095 struct cgraph_node *node;
5096 lto_symtab_encoder_t encoder;
5097
5098 index = streamer_read_uhwi (&ib_main);
5099 encoder = file_data->symtab_node_encoder;
5100 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5101 index));
5102 gcc_assert (node->definition);
5103 read_ipcp_transformation_info (&ib_main, node, data_in);
5104 }
5105 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5106 len);
5107 lto_data_in_delete (data_in);
5108 }
5109
5110 /* Read IPA-CP aggregate replacements. */
5111
5112 void
5113 ipcp_read_transformation_summaries (void)
5114 {
5115 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5116 struct lto_file_decl_data *file_data;
5117 unsigned int j = 0;
5118
5119 while ((file_data = file_data_vec[j++]))
5120 {
5121 size_t len;
5122 const char *data = lto_get_section_data (file_data,
5123 LTO_section_ipcp_transform,
5124 NULL, &len);
5125 if (data)
5126 read_replacements_section (file_data, data, len);
5127 }
5128 }
5129
5130 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5131 NODE. */
5132
5133 static void
5134 adjust_agg_replacement_values (struct cgraph_node *node,
5135 struct ipa_agg_replacement_value *aggval)
5136 {
5137 struct ipa_agg_replacement_value *v;
5138 int i, c = 0, d = 0, *adj;
5139
5140 if (!node->clone.combined_args_to_skip)
5141 return;
5142
5143 for (v = aggval; v; v = v->next)
5144 {
5145 gcc_assert (v->index >= 0);
5146 if (c < v->index)
5147 c = v->index;
5148 }
5149 c++;
5150
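  /* Build a map from original parameter indices to indices with the skipped
     parameters removed; skipped parameters map to -1.  */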
5151 adj = XALLOCAVEC (int, c);
5152 for (i = 0; i < c; i++)
5153 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5154 {
5155 adj[i] = -1;
5156 d++;
5157 }
5158 else
5159 adj[i] = i - d;
5160
5161 for (v = aggval; v; v = v->next)
5162 v->index = adj[v->index];
5163 }
5164
5165 /* Dominator walker driving the ipcp modification phase. */
5166
5167 class ipcp_modif_dom_walker : public dom_walker
5168 {
5169 public:
5170 ipcp_modif_dom_walker (struct func_body_info *fbi,
5171 vec<ipa_param_descriptor> descs,
5172 struct ipa_agg_replacement_value *av,
5173 bool *sc, bool *cc)
5174 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5175 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5176
5177 virtual void before_dom_children (basic_block);
5178
5179 private:
5180 struct func_body_info *m_fbi;
5181 vec<ipa_param_descriptor> m_descriptors;
5182 struct ipa_agg_replacement_value *m_aggval;
5183 bool *m_something_changed, *m_cfg_changed;
5184 };
5185
5186 void
5187 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5188 {
5189 gimple_stmt_iterator gsi;
5190 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5191 {
5192 struct ipa_agg_replacement_value *v;
5193 gimple stmt = gsi_stmt (gsi);
5194 tree rhs, val, t;
5195 HOST_WIDE_INT offset, size;
5196 int index;
5197 bool by_ref, vce;
5198
5199 if (!gimple_assign_load_p (stmt))
5200 continue;
5201 rhs = gimple_assign_rhs1 (stmt);
5202 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5203 continue;
5204
5205 vce = false;
5206 t = rhs;
5207 while (handled_component_p (t))
5208 {
5209 /* A VIEW_CONVERT_EXPR can do things like convert an array of integers
5210 into one bigger integer, and similar things we do not handle below. */
5211 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5212 {
5213 vce = true;
5214 break;
5215 }
5216 t = TREE_OPERAND (t, 0);
5217 }
5218 if (vce)
5219 continue;
5220
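      /* Check whether the load reads from an aggregate passed in (or pointed
	 to by) a parameter and look up a matching replacement value.  */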
5221 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5222 &offset, &size, &by_ref))
5223 continue;
5224 for (v = m_aggval; v; v = v->next)
5225 if (v->index == index
5226 && v->offset == offset)
5227 break;
5228 if (!v
5229 || v->by_ref != by_ref
5230 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5231 continue;
5232
5233 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5234 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5235 {
5236 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5237 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5238 else if (TYPE_SIZE (TREE_TYPE (rhs))
5239 == TYPE_SIZE (TREE_TYPE (v->value)))
5240 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5241 else
5242 {
5243 if (dump_file)
5244 {
5245 fprintf (dump_file, " const ");
5246 print_generic_expr (dump_file, v->value, 0);
5247 fprintf (dump_file, " can't be converted to type of ");
5248 print_generic_expr (dump_file, rhs, 0);
5249 fprintf (dump_file, "\n");
5250 }
5251 continue;
5252 }
5253 }
5254 else
5255 val = v->value;
5256
5257 if (dump_file && (dump_flags & TDF_DETAILS))
5258 {
5259 fprintf (dump_file, "Modifying stmt:\n ");
5260 print_gimple_stmt (dump_file, stmt, 0, 0);
5261 }
5262 gimple_assign_set_rhs_from_tree (&gsi, val);
5263 update_stmt (stmt);
5264
5265 if (dump_file && (dump_flags & TDF_DETAILS))
5266 {
5267 fprintf (dump_file, "into:\n ");
5268 print_gimple_stmt (dump_file, stmt, 0, 0);
5269 fprintf (dump_file, "\n");
5270 }
5271
5272 *m_something_changed = true;
5273 if (maybe_clean_eh_stmt (stmt)
5274 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5275 *m_cfg_changed = true;
5276 }
5277
5278 }
5279
5280 /* Update alignment of formal parameters as described in
5281 ipcp_transformation_summary. */
5282
5283 static void
5284 ipcp_update_alignments (struct cgraph_node *node)
5285 {
5286 tree fndecl = node->decl;
5287 tree parm = DECL_ARGUMENTS (fndecl);
5288 tree next_parm = parm;
5289 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5290 if (!ts || vec_safe_length (ts->alignments) == 0)
5291 return;
5292 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5293 unsigned count = alignments.length ();
5294
5295 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5296 {
5297 if (node->clone.combined_args_to_skip
5298 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5299 continue;
5300 gcc_checking_assert (parm);
5301 next_parm = DECL_CHAIN (parm);
5302
5303 if (!alignments[i].known || !is_gimple_reg (parm))
5304 continue;
5305 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5306 if (!ddef)
5307 continue;
5308
5309 if (dump_file)
5310 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5311 "misalignment to %u\n", i, alignments[i].align,
5312 alignments[i].misalign);
5313
5314 struct ptr_info_def *pi = get_ptr_info (ddef);
5315 gcc_checking_assert (pi);
5316 unsigned old_align;
5317 unsigned old_misalign;
5318 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5319
5320 if (old_known
5321 && old_align >= alignments[i].align)
5322 {
5323 if (dump_file)
5324 fprintf (dump_file, " But the alignment was already %u.\n",
5325 old_align);
5326 continue;
5327 }
5328 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5329 }
5330 }
5331
5332 /* IPCP transformation phase doing propagation of aggregate values. */
5333
5334 unsigned int
5335 ipcp_transform_function (struct cgraph_node *node)
5336 {
5337 vec<ipa_param_descriptor> descriptors = vNULL;
5338 struct func_body_info fbi;
5339 struct ipa_agg_replacement_value *aggval;
5340 int param_count;
5341 bool cfg_changed = false, something_changed = false;
5342
5343 gcc_checking_assert (cfun);
5344 gcc_checking_assert (current_function_decl);
5345
5346 if (dump_file)
5347 fprintf (dump_file, "Modification phase of node %s/%i\n",
5348 node->name (), node->order);
5349
5350 ipcp_update_alignments (node);
5351 aggval = ipa_get_agg_replacements_for_node (node);
5352 if (!aggval)
5353 return 0;
5354 param_count = count_formal_params (node->decl);
5355 if (param_count == 0)
5356 return 0;
5357 adjust_agg_replacement_values (node, aggval);
5358 if (dump_file)
5359 ipa_dump_agg_replacement_values (dump_file, aggval);
5360
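  /* Set up the function body information used by the parameter-load analysis
     during the dominator walk below.  */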
5361 fbi.node = node;
5362 fbi.info = NULL;
5363 fbi.bb_infos = vNULL;
5364 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5365 fbi.param_count = param_count;
5366 fbi.aa_walked = 0;
5367
5368 descriptors.safe_grow_cleared (param_count);
5369 ipa_populate_param_decls (node, descriptors);
5370 calculate_dominance_info (CDI_DOMINATORS);
5371 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5372 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5373
5374 int i;
5375 struct ipa_bb_info *bi;
5376 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5377 free_ipa_bb_info (bi);
5378 fbi.bb_infos.release ();
5379 free_dominance_info (CDI_DOMINATORS);
5380 (*ipcp_transformations)[node->uid].agg_values = NULL;
5381 (*ipcp_transformations)[node->uid].alignments = NULL;
5382 descriptors.release ();
5383
5384 if (!something_changed)
5385 return 0;
5386 else if (cfg_changed)
5387 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5388 else
5389 return TODO_update_ssa_only_virtuals;
5390 }