Change to use a type-based pool allocator in ipa-prop.c.
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hash-set.h"
24 #include "machmode.h"
25 #include "vec.h"
26 #include "double-int.h"
27 #include "input.h"
28 #include "alias.h"
29 #include "symtab.h"
30 #include "options.h"
31 #include "wide-int.h"
32 #include "inchash.h"
33 #include "tree.h"
34 #include "fold-const.h"
35 #include "predict.h"
36 #include "tm.h"
37 #include "hard-reg-set.h"
38 #include "function.h"
39 #include "dominance.h"
40 #include "cfg.h"
41 #include "basic-block.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "hashtab.h"
50 #include "rtl.h"
51 #include "flags.h"
52 #include "statistics.h"
53 #include "real.h"
54 #include "fixed-value.h"
55 #include "insn-config.h"
56 #include "expmed.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "calls.h"
60 #include "emit-rtl.h"
61 #include "varasm.h"
62 #include "stmt.h"
63 #include "expr.h"
64 #include "stor-layout.h"
65 #include "print-tree.h"
66 #include "gimplify.h"
67 #include "gimple-iterator.h"
68 #include "gimplify-me.h"
69 #include "gimple-walk.h"
70 #include "langhooks.h"
71 #include "target.h"
72 #include "hash-map.h"
73 #include "plugin-api.h"
74 #include "ipa-ref.h"
75 #include "cgraph.h"
76 #include "alloc-pool.h"
77 #include "symbol-summary.h"
78 #include "ipa-prop.h"
79 #include "bitmap.h"
80 #include "gimple-ssa.h"
81 #include "tree-cfg.h"
82 #include "tree-phinodes.h"
83 #include "ssa-iterators.h"
84 #include "tree-into-ssa.h"
85 #include "tree-dfa.h"
86 #include "tree-pass.h"
87 #include "tree-inline.h"
88 #include "ipa-inline.h"
89 #include "diagnostic.h"
90 #include "gimple-pretty-print.h"
91 #include "lto-streamer.h"
92 #include "data-streamer.h"
93 #include "tree-streamer.h"
94 #include "params.h"
95 #include "ipa-utils.h"
96 #include "stringpool.h"
97 #include "tree-ssanames.h"
98 #include "dbgcnt.h"
99 #include "domwalk.h"
100 #include "builtins.h"
101
102 /* Intermediate information that we get from alias analysis about a particular
103 parameter in a particular basic_block. When a parameter or the memory it
104 references is marked modified, we use that information in all dominated
105 blocks without consulting the alias analysis oracle. */
106
107 struct param_aa_status
108 {
109 /* Set when this structure contains meaningful information. If not, the
110 structure describing a dominating BB should be used instead. */
111 bool valid;
112
113 /* Whether we have seen something which might have modified the data in
114 question. PARM is for the parameter itself, REF is for data it points to
115 but using the alias type of individual accesses and PT is the same thing
116 but for computing aggregate pass-through functions using a very inclusive
117 ao_ref. */
118 bool parm_modified, ref_modified, pt_modified;
119 };
120
121 /* Information related to a given BB that is used only when looking at function
122 body. */
123
124 struct ipa_bb_info
125 {
126 /* Call graph edges going out of this BB. */
127 vec<cgraph_edge *> cg_edges;
128 /* Alias analysis statuses of each formal parameter at this bb. */
129 vec<param_aa_status> param_aa_statuses;
130 };
131
132 /* Structure with global information that is only used when looking at function
133 body. */
134
135 struct func_body_info
136 {
137 /* The node that is being analyzed. */
138 cgraph_node *node;
139
140 /* Its info. */
141 struct ipa_node_params *info;
142
143 /* Information about individual BBs. */
144 vec<ipa_bb_info> bb_infos;
145
146 /* Number of parameters. */
147 int param_count;
148
149 /* Number of statements already walked when analyzing this function. */
150 unsigned int aa_walked;
151 };
152
153 /* Function summary where the parameter infos are actually stored. */
154 ipa_node_params_t *ipa_node_params_sum = NULL;
155 /* Vector of IPA-CP transformation data for each clone. */
156 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
157 /* Vector where the per-edge argument information is actually stored. */
158 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
159
160 /* Holders of ipa cgraph hooks: */
161 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
162 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
163 static struct cgraph_node_hook_list *function_insertion_hook_holder;
164
165 /* Description of a reference to an IPA constant. */
166 struct ipa_cst_ref_desc
167 {
168 /* Edge that corresponds to the statement which took the reference. */
169 struct cgraph_edge *cs;
170 /* Linked list of duplicates created when call graph edges are cloned. */
171 struct ipa_cst_ref_desc *next_duplicate;
172 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
173 is out of control. */
174 int refcount;
175 };
176
177 /* Allocation pool for reference descriptions. */
178
179 static pool_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
180 ("IPA-PROP ref descriptions", 32);
181
182 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
183 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
184
185 static bool
186 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
187 {
188 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
189
190 if (!fs_opts)
191 return false;
192 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
193 }
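
/* For illustration (a sketch, not from the original source): a function
   compiled with per-function options that disable optimization, e.g.

     __attribute__ ((optimize ("O0")))
     int keep_as_is (int x) { return x; }

   has DECL_FUNCTION_SPECIFIC_OPTIMIZATION with 'optimize' cleared, so the
   predicate above returns true and IPA-CP leaves the function alone.  */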
194
195 /* Return index of the formal whose tree is PTREE in the function whose
196 parameters are described by DESCRIPTORS. */
197
198 static int
199 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
200 {
201 int i, count;
202
203 count = descriptors.length ();
204 for (i = 0; i < count; i++)
205 if (descriptors[i].decl == ptree)
206 return i;
207
208 return -1;
209 }
210
211 /* Return index of the formal whose tree is PTREE in function which corresponds
212 to INFO. */
213
214 int
215 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
216 {
217 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
218 }
219
220 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
221 NODE. */
222
223 static void
224 ipa_populate_param_decls (struct cgraph_node *node,
225 vec<ipa_param_descriptor> &descriptors)
226 {
227 tree fndecl;
228 tree fnargs;
229 tree parm;
230 int param_num;
231
232 fndecl = node->decl;
233 gcc_assert (gimple_has_body_p (fndecl));
234 fnargs = DECL_ARGUMENTS (fndecl);
235 param_num = 0;
236 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
237 {
238 descriptors[param_num].decl = parm;
239 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
240 true);
241 param_num++;
242 }
243 }
244
245 /* Return how many formal parameters FNDECL has. */
246
247 int
248 count_formal_params (tree fndecl)
249 {
250 tree parm;
251 int count = 0;
252 gcc_assert (gimple_has_body_p (fndecl));
253
254 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
255 count++;
256
257 return count;
258 }
259
260 /* Dump a printable representation of the Ith formal parameter of the function
261 corresponding to INFO into FILE. */
263
264 void
265 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
266 {
267 fprintf (file, "param #%i", i);
268 if (info->descriptors[i].decl)
269 {
270 fprintf (file, " ");
271 print_generic_expr (file, info->descriptors[i].decl, 0);
272 }
273 }
274
275 /* Initialize the ipa_node_params structure associated with NODE
276 to hold PARAM_COUNT parameters. */
277
278 void
279 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
280 {
281 struct ipa_node_params *info = IPA_NODE_REF (node);
282
283 if (!info->descriptors.exists () && param_count)
284 info->descriptors.safe_grow_cleared (param_count);
285 }
286
287 /* Initialize the ipa_node_params structure associated with NODE by counting
288 the function parameters, creating the descriptors and populating their
289 param_decls. */
290
291 void
292 ipa_initialize_node_params (struct cgraph_node *node)
293 {
294 struct ipa_node_params *info = IPA_NODE_REF (node);
295
296 if (!info->descriptors.exists ())
297 {
298 ipa_alloc_node_params (node, count_formal_params (node->decl));
299 ipa_populate_param_decls (node, info->descriptors);
300 }
301 }
302
303 /* Print the jump functions associated with call graph edge CS to file F. */
304
305 static void
306 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
307 {
308 int i, count;
309
310 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
311 for (i = 0; i < count; i++)
312 {
313 struct ipa_jump_func *jump_func;
314 enum jump_func_type type;
315
316 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
317 type = jump_func->type;
318
319 fprintf (f, " param %d: ", i);
320 if (type == IPA_JF_UNKNOWN)
321 fprintf (f, "UNKNOWN\n");
322 else if (type == IPA_JF_CONST)
323 {
324 tree val = jump_func->value.constant.value;
325 fprintf (f, "CONST: ");
326 print_generic_expr (f, val, 0);
327 if (TREE_CODE (val) == ADDR_EXPR
328 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
329 {
330 fprintf (f, " -> ");
331 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
332 0);
333 }
334 fprintf (f, "\n");
335 }
336 else if (type == IPA_JF_PASS_THROUGH)
337 {
338 fprintf (f, "PASS THROUGH: ");
339 fprintf (f, "%d, op %s",
340 jump_func->value.pass_through.formal_id,
341 get_tree_code_name(jump_func->value.pass_through.operation));
342 if (jump_func->value.pass_through.operation != NOP_EXPR)
343 {
344 fprintf (f, " ");
345 print_generic_expr (f,
346 jump_func->value.pass_through.operand, 0);
347 }
348 if (jump_func->value.pass_through.agg_preserved)
349 fprintf (f, ", agg_preserved");
350 fprintf (f, "\n");
351 }
352 else if (type == IPA_JF_ANCESTOR)
353 {
354 fprintf (f, "ANCESTOR: ");
355 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
356 jump_func->value.ancestor.formal_id,
357 jump_func->value.ancestor.offset);
358 if (jump_func->value.ancestor.agg_preserved)
359 fprintf (f, ", agg_preserved");
360 fprintf (f, "\n");
361 }
362
363 if (jump_func->agg.items)
364 {
365 struct ipa_agg_jf_item *item;
366 int j;
367
368 fprintf (f, " Aggregate passed by %s:\n",
369 jump_func->agg.by_ref ? "reference" : "value");
370 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
371 {
372 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
373 item->offset);
374 if (TYPE_P (item->value))
375 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
376 tree_to_uhwi (TYPE_SIZE (item->value)));
377 else
378 {
379 fprintf (f, "cst: ");
380 print_generic_expr (f, item->value, 0);
381 }
382 fprintf (f, "\n");
383 }
384 }
385
386 struct ipa_polymorphic_call_context *ctx
387 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
388 if (ctx && !ctx->useless_p ())
389 {
390 fprintf (f, " Context: ");
391 ctx->dump (dump_file);
392 }
393
394 if (jump_func->alignment.known)
395 {
396 fprintf (f, " Alignment: %u, misalignment: %u\n",
397 jump_func->alignment.align,
398 jump_func->alignment.misalign);
399 }
400 else
401 fprintf (f, " Unknown alignment\n");
402 }
403 }
404
405
406 /* Print the jump functions of all arguments on all call graph edges going from
407 NODE to file F. */
408
409 void
410 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
411 {
412 struct cgraph_edge *cs;
413
414 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
415 node->order);
416 for (cs = node->callees; cs; cs = cs->next_callee)
417 {
418 if (!ipa_edge_args_info_available_for_edge_p (cs))
419 continue;
420
421 fprintf (f, " callsite %s/%i -> %s/%i : \n",
422 xstrdup_for_dump (node->name ()), node->order,
423 xstrdup_for_dump (cs->callee->name ()),
424 cs->callee->order);
425 ipa_print_node_jump_functions_for_edge (f, cs);
426 }
427
428 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
429 {
430 struct cgraph_indirect_call_info *ii;
431 if (!ipa_edge_args_info_available_for_edge_p (cs))
432 continue;
433
434 ii = cs->indirect_info;
435 if (ii->agg_contents)
436 fprintf (f, " indirect %s callsite, calling param %i, "
437 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
438 ii->member_ptr ? "member ptr" : "aggregate",
439 ii->param_index, ii->offset,
440 ii->by_ref ? "by reference" : "by_value");
441 else
442 fprintf (f, " indirect %s callsite, calling param %i, "
443 "offset " HOST_WIDE_INT_PRINT_DEC,
444 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
445 ii->offset);
446
447 if (cs->call_stmt)
448 {
449 fprintf (f, ", for stmt ");
450 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
451 }
452 else
453 fprintf (f, "\n");
454 if (ii->polymorphic)
455 ii->context.dump (f);
456 ipa_print_node_jump_functions_for_edge (f, cs);
457 }
458 }
459
460 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
461
462 void
463 ipa_print_all_jump_functions (FILE *f)
464 {
465 struct cgraph_node *node;
466
467 fprintf (f, "\nJump functions:\n");
468 FOR_EACH_FUNCTION (node)
469 {
470 ipa_print_node_jump_functions (f, node);
471 }
472 }
473
474 /* Set JFUNC to be a know-nothing (IPA_JF_UNKNOWN) jump function. */
475
476 static void
477 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
478 {
479 jfunc->type = IPA_JF_UNKNOWN;
480 jfunc->alignment.known = false;
481 }
482
483 /* Set DST to be a copy of another constant jump function SRC (to be used by
484 jump function combination code). The two functions will share their rdesc. */
485
486 static void
487 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
488 struct ipa_jump_func *src)
489
490 {
491 gcc_checking_assert (src->type == IPA_JF_CONST);
492 dst->type = IPA_JF_CONST;
493 dst->value.constant = src->value.constant;
494 }
495
496 /* Set JFUNC to be a constant jump function. */
497
498 static void
499 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
500 struct cgraph_edge *cs)
501 {
502 constant = unshare_expr (constant);
503 if (constant && EXPR_P (constant))
504 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
505 jfunc->type = IPA_JF_CONST;
506 jfunc->value.constant.value = unshare_expr_without_location (constant);
507
508 if (TREE_CODE (constant) == ADDR_EXPR
509 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
510 {
511 struct ipa_cst_ref_desc *rdesc;
512
513 rdesc = ipa_refdesc_pool.allocate ();
514 rdesc->cs = cs;
515 rdesc->next_duplicate = NULL;
516 rdesc->refcount = 1;
517 jfunc->value.constant.rdesc = rdesc;
518 }
519 else
520 jfunc->value.constant.rdesc = NULL;
521 }
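
/* As an illustration (example names are made up): in a caller like

     extern void bar (int, void (*) (void));
     extern void helper (void);
     void foo (void) { bar (7, helper); }

   the first argument yields a constant jump function for 7 with a NULL
   rdesc, while the second, an ADDR_EXPR of a FUNCTION_DECL, also gets a
   reference description allocated from ipa_refdesc_pool above.  */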
522
523 /* Set JFUNC to be a simple pass-through jump function. */
524 static void
525 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
526 bool agg_preserved)
527 {
528 jfunc->type = IPA_JF_PASS_THROUGH;
529 jfunc->value.pass_through.operand = NULL_TREE;
530 jfunc->value.pass_through.formal_id = formal_id;
531 jfunc->value.pass_through.operation = NOP_EXPR;
532 jfunc->value.pass_through.agg_preserved = agg_preserved;
533 }
534
535 /* Set JFUNC to be an arithmetic pass through jump function. */
536
537 static void
538 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
539 tree operand, enum tree_code operation)
540 {
541 jfunc->type = IPA_JF_PASS_THROUGH;
542 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
543 jfunc->value.pass_through.formal_id = formal_id;
544 jfunc->value.pass_through.operation = operation;
545 jfunc->value.pass_through.agg_preserved = false;
546 }
547
548 /* Set JFUNC to be an ancestor jump function. */
549
550 static void
551 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
552 int formal_id, bool agg_preserved)
553 {
554 jfunc->type = IPA_JF_ANCESTOR;
555 jfunc->value.ancestor.formal_id = formal_id;
556 jfunc->value.ancestor.offset = offset;
557 jfunc->value.ancestor.agg_preserved = agg_preserved;
558 }
559
560 /* Get IPA BB information about the given BB. FBI is the context of analysis
561 of this function body. */
562
563 static struct ipa_bb_info *
564 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
565 {
566 gcc_checking_assert (fbi);
567 return &fbi->bb_infos[bb->index];
568 }
569
570 /* Structure to be passed in between detect_type_change and
571 check_stmt_for_type_change. */
572
573 struct prop_type_change_info
574 {
575 /* Offset into the object where there is the virtual method pointer we are
576 looking for. */
577 HOST_WIDE_INT offset;
578 /* The declaration or SSA_NAME pointer of the base that we are checking for
579 type change. */
580 tree object;
581 /* Set to true if dynamic type change has been detected. */
582 bool type_maybe_changed;
583 };
584
585 /* Return true if STMT can modify a virtual method table pointer.
586
587 This function makes special assumptions about both constructors and
588 destructors which are all the functions that are allowed to alter the VMT
589 pointers. It assumes that destructors begin with assignment into all VMT
590 pointers and that constructors essentially look in the following way:
591
592 1) The very first thing they do is that they call constructors of ancestor
593 sub-objects that have them.
594
595 2) Then the VMT pointers of this and all its ancestors are set to new
596 values corresponding to the type of the constructor.
597
598 3) Only afterwards, other stuff such as constructor of member sub-objects
599 and the code written by the user is run. Only this may include calling
600 virtual functions, directly or indirectly.
601
602 There is no way to call a constructor of an ancestor sub-object in any
603 other way.
604
605 This means that we do not have to care whether constructors get the correct
606 type information because they will always change it (in fact, if we define
607 the type to be given by the VMT pointer, it is undefined).
608
609 The most important fact to derive from the above is that if, for some
610 statement in the section 3, we try to detect whether the dynamic type has
611 changed, we can safely ignore all calls as we examine the function body
612 backwards until we reach statements in section 2 because these calls cannot
613 be ancestor constructors or destructors (if the input is not bogus) and so
614 do not change the dynamic type (this holds true only for automatically
615 allocated objects but at the moment we devirtualize only these). We then
616 must detect that statements in section 2 change the dynamic type and can try
617 to derive the new type. That is enough and we can stop, we will never see
618 the calls into constructors of sub-objects in this code. Therefore we can
619 safely ignore all call statements that we traverse.
620 */
621
622 static bool
623 stmt_may_be_vtbl_ptr_store (gimple stmt)
624 {
625 if (is_gimple_call (stmt))
626 return false;
627 if (gimple_clobber_p (stmt))
628 return false;
629 else if (is_gimple_assign (stmt))
630 {
631 tree lhs = gimple_assign_lhs (stmt);
632
633 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
634 {
635 if (flag_strict_aliasing
636 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
637 return false;
638
639 if (TREE_CODE (lhs) == COMPONENT_REF
640 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
641 return false;
642 /* In the future we might want to use get_base_ref_and_offset to find
643 if there is a field corresponding to the offset and if so, proceed
644 almost like if it was a component ref. */
645 }
646 }
647 return true;
648 }
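
/* To illustrate the above (a sketch, not from the original source): in a
   constructor such as

     struct A { virtual void f (); };
     A::A () {}

   the implicit gimple store of &_ZTV1A into this->_vptr.A is a
   COMPONENT_REF of a DECL_VIRTUAL_P field, so the function above keeps it
   (returns true), while plain calls and clobbers are filtered out.  */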
649
650 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
651 to check whether a particular statement may modify the virtual table
652 pointer. It stores its result into DATA, which points to a
653 prop_type_change_info structure. */
654
655 static bool
656 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
657 {
658 gimple stmt = SSA_NAME_DEF_STMT (vdef);
659 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
660
661 if (stmt_may_be_vtbl_ptr_store (stmt))
662 {
663 tci->type_maybe_changed = true;
664 return true;
665 }
666 else
667 return false;
668 }
669
670 /* See if ARG is a PARM_DECL describing an instance passed by pointer
671 or reference in FUNCTION. Return true if the dynamic type may change
672 between the beginning of the function and the invocation of CALL.
673
674 Generally functions are not allowed to change the type of such instances,
675 but they do call destructors. We assume that methods cannot destroy the
676 THIS pointer. Also, as a special case, constructors and destructors may
677 change the type of the THIS pointer. */
678
679 static bool
680 param_type_may_change_p (tree function, tree arg, gimple call)
681 {
682 /* Pure functions cannot change the dynamic type;
683 that would require writing to memory. */
684 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
685 return false;
686 /* We need to check if we are within an inlined constructor
687 or destructor (ideally we would have a way to check that the
688 inlined cdtor is actually working on ARG, but we don't have an
689 easy way to tell, so punt on all non-pure cdtors).
690 We may also record the types of cdtors and, once we know the
691 type of the instance, match them.
692
693 Also, code unification optimizations may merge calls from
694 different blocks, making return values unreliable. So
695 do nothing during late optimization. */
696 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
697 return true;
698 if (TREE_CODE (arg) == SSA_NAME
699 && SSA_NAME_IS_DEFAULT_DEF (arg)
700 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
701 {
702 /* Normal (non-THIS) argument. */
703 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
704 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
705 /* THIS pointer of a method - here we want to watch constructors
706 and destructors as those definitely may change the dynamic
707 type. */
708 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
709 && !DECL_CXX_CONSTRUCTOR_P (function)
710 && !DECL_CXX_DESTRUCTOR_P (function)
711 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
712 {
713 /* Walk the inline stack and watch out for ctors/dtors. */
714 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
715 block = BLOCK_SUPERCONTEXT (block))
716 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
717 return true;
718 return false;
719 }
720 }
721 return true;
722 }
723
724 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
725 callsite CALL) by looking for assignments to its virtual table pointer. If
726 it is, return true and fill in the jump function JFUNC with relevant type
727 information or set it to unknown. ARG is the object itself (not a pointer
728 to it, unless dereferenced). BASE is the base of the memory access as
729 returned by get_ref_base_and_extent, as is the offset.
730
731 This is a helper function for detect_type_change and detect_type_change_ssa
732 that does the heavy work which is usually unnecessary. */
733
734 static bool
735 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
736 gcall *call, struct ipa_jump_func *jfunc,
737 HOST_WIDE_INT offset)
738 {
739 struct prop_type_change_info tci;
740 ao_ref ao;
741 bool entry_reached = false;
742
743 gcc_checking_assert (DECL_P (arg)
744 || TREE_CODE (arg) == MEM_REF
745 || handled_component_p (arg));
746
747 comp_type = TYPE_MAIN_VARIANT (comp_type);
748
749 /* Const calls cannot call virtual methods through VMT and so type changes do
750 not matter. */
751 if (!flag_devirtualize || !gimple_vuse (call)
752 /* Be sure expected_type is polymorphic. */
753 || !comp_type
754 || TREE_CODE (comp_type) != RECORD_TYPE
755 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
756 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
757 return true;
758
759 ao_ref_init (&ao, arg);
760 ao.base = base;
761 ao.offset = offset;
762 ao.size = POINTER_SIZE;
763 ao.max_size = ao.size;
764
765 tci.offset = offset;
766 tci.object = get_base_address (arg);
767 tci.type_maybe_changed = false;
768
769 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
770 &tci, NULL, &entry_reached);
771 if (!tci.type_maybe_changed)
772 return false;
773
774 ipa_set_jf_unknown (jfunc);
775 return true;
776 }
777
778 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
779 If it is, return true and fill in the jump function JFUNC with relevant type
780 information or set it to unknown. ARG is the object itself (not a pointer
781 to it, unless dereferenced). BASE is the base of the memory access as
782 returned by get_ref_base_and_extent, as is the offset. */
783
784 static bool
785 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
786 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
787 {
788 if (!flag_devirtualize)
789 return false;
790
791 if (TREE_CODE (base) == MEM_REF
792 && !param_type_may_change_p (current_function_decl,
793 TREE_OPERAND (base, 0),
794 call))
795 return false;
796 return detect_type_change_from_memory_writes (arg, base, comp_type,
797 call, jfunc, offset);
798 }
799
800 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
801 SSA name (its dereference will become the base and the offset is assumed to
802 be zero). */
803
804 static bool
805 detect_type_change_ssa (tree arg, tree comp_type,
806 gcall *call, struct ipa_jump_func *jfunc)
807 {
808 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
809 if (!flag_devirtualize
810 || !POINTER_TYPE_P (TREE_TYPE (arg)))
811 return false;
812
813 if (!param_type_may_change_p (current_function_decl, arg, call))
814 return false;
815
816 arg = build2 (MEM_REF, ptr_type_node, arg,
817 build_int_cst (ptr_type_node, 0));
818
819 return detect_type_change_from_memory_writes (arg, arg, comp_type,
820 call, jfunc, 0);
821 }
822
823 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
824 boolean variable pointed to by DATA. */
825
826 static bool
827 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
828 void *data)
829 {
830 bool *b = (bool *) data;
831 *b = true;
832 return true;
833 }
834
835 /* Return true if we have already walked so many statements in AA that we
836 should really just start giving up. */
837
838 static bool
839 aa_overwalked (struct func_body_info *fbi)
840 {
841 gcc_checking_assert (fbi);
842 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
843 }
844
845 /* Find the nearest valid aa status for parameter specified by INDEX that
846 dominates BB. */
847
848 static struct param_aa_status *
849 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
850 int index)
851 {
852 while (true)
853 {
854 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
855 if (!bb)
856 return NULL;
857 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
858 if (!bi->param_aa_statuses.is_empty ()
859 && bi->param_aa_statuses[index].valid)
860 return &bi->param_aa_statuses[index];
861 }
862 }
863
864 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
865 structures and/or initialize the result with a dominating description as
866 necessary. */
867
868 static struct param_aa_status *
869 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
870 int index)
871 {
872 gcc_checking_assert (fbi);
873 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
874 if (bi->param_aa_statuses.is_empty ())
875 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
876 struct param_aa_status *paa = &bi->param_aa_statuses[index];
877 if (!paa->valid)
878 {
879 gcc_checking_assert (!paa->parm_modified
880 && !paa->ref_modified
881 && !paa->pt_modified);
882 struct param_aa_status *dom_paa;
883 dom_paa = find_dominating_aa_status (fbi, bb, index);
884 if (dom_paa)
885 *paa = *dom_paa;
886 else
887 paa->valid = true;
888 }
889
890 return paa;
891 }
892
893 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
894 a value known not to be modified in this function before reaching the
895 statement STMT. FBI holds information about the function that we have
896 gathered so far but that does not survive the summary building stage. */
897
898 static bool
899 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
900 gimple stmt, tree parm_load)
901 {
902 struct param_aa_status *paa;
903 bool modified = false;
904 ao_ref refd;
905
906 /* FIXME: FBI can be NULL if we are being called from outside
907 ipa_node_analysis or ipcp_transform_function, which currently happens
908 during inlining analysis. It would be great to extend fbi's lifetime and
909 always have it. Currently, we are just not afraid of too much walking in
910 that case. */
911 if (fbi)
912 {
913 if (aa_overwalked (fbi))
914 return false;
915 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
916 if (paa->parm_modified)
917 return false;
918 }
919 else
920 paa = NULL;
921
922 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
923 ao_ref_init (&refd, parm_load);
924 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
925 &modified, NULL);
926 if (fbi)
927 fbi->aa_walked += walked;
928 if (paa && modified)
929 paa->parm_modified = true;
930 return !modified;
931 }
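
/* For example (a sketch with made-up names): in

     void foo (int a)
     {
       a.0_1 = a;    // preserved - nothing may have modified 'a' yet
       bar (&a);     // the address escapes, the call may store to 'a'
       a.1_2 = a;    // not preserved - the call may have changed 'a'
     }

   the first load satisfies the predicate above while the second does not,
   and the negative verdict is cached in the BB's param_aa_status.  */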
932
933 /* If STMT is an assignment that loads a value from a parameter declaration,
934 return the index of the parameter in ipa_node_params which has not been
935 modified. Otherwise return -1. */
936
937 static int
938 load_from_unmodified_param (struct func_body_info *fbi,
939 vec<ipa_param_descriptor> descriptors,
940 gimple stmt)
941 {
942 int index;
943 tree op1;
944
945 if (!gimple_assign_single_p (stmt))
946 return -1;
947
948 op1 = gimple_assign_rhs1 (stmt);
949 if (TREE_CODE (op1) != PARM_DECL)
950 return -1;
951
952 index = ipa_get_param_decl_index_1 (descriptors, op1);
953 if (index < 0
954 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
955 return -1;
956
957 return index;
958 }
959
960 /* Return true if memory reference REF (which must be a load through parameter
961 with INDEX) loads data that are known to be unmodified in this function
962 before reaching statement STMT. */
963
964 static bool
965 parm_ref_data_preserved_p (struct func_body_info *fbi,
966 int index, gimple stmt, tree ref)
967 {
968 struct param_aa_status *paa;
969 bool modified = false;
970 ao_ref refd;
971
972 /* FIXME: FBI can be NULL if we are being called from outside
973 ipa_node_analysis or ipcp_transform_function, which currently happens
974 during inlining analysis. It would be great to extend fbi's lifetime and
975 always have it. Currently, we are just not afraid of too much walking in
976 that case. */
977 if (fbi)
978 {
979 if (aa_overwalked (fbi))
980 return false;
981 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
982 if (paa->ref_modified)
983 return false;
984 }
985 else
986 paa = NULL;
987
988 gcc_checking_assert (gimple_vuse (stmt));
989 ao_ref_init (&refd, ref);
990 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
991 &modified, NULL);
992 if (fbi)
993 fbi->aa_walked += walked;
994 if (paa && modified)
995 paa->ref_modified = true;
996 return !modified;
997 }
998
999 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1000 is known to be unmodified in this function before reaching call statement
1001 CALL into which it is passed. FBI describes the function body. */
1002
1003 static bool
1004 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
1005 gimple call, tree parm)
1006 {
1007 bool modified = false;
1008 ao_ref refd;
1009
1010 /* It's unnecessary to calculate anything about memory contents for a const
1011 function because it is not going to use it. But do not cache the result
1012 either. Also, no such calculations for non-pointers. */
1013 if (!gimple_vuse (call)
1014 || !POINTER_TYPE_P (TREE_TYPE (parm))
1015 || aa_overwalked (fbi))
1016 return false;
1017
1018 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1019 index);
1020 if (paa->pt_modified)
1021 return false;
1022
1023 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1024 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1025 &modified, NULL);
1026 fbi->aa_walked += walked;
1027 if (modified)
1028 paa->pt_modified = true;
1029 return !modified;
1030 }
1031
1032 /* Return true if we can prove that OP is a memory reference loading unmodified
1033 data from an aggregate passed as a parameter and if the aggregate is passed
1034 by reference, that the alias type of the load corresponds to the type of the
1035 formal parameter (so that we can rely on this type for TBAA in callers).
1036 DESCRIPTORS and FBI describe parameters of the current function (but the
1037 latter can be NULL), STMT is the load statement. If the function returns true,
1038 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1039 within the aggregate and whether it is a load from a value passed by
1040 reference respectively. */
1041
1042 static bool
1043 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1044 vec<ipa_param_descriptor> descriptors,
1045 gimple stmt, tree op, int *index_p,
1046 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1047 bool *by_ref_p)
1048 {
1049 int index;
1050 HOST_WIDE_INT size, max_size;
1051 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1052
1053 if (max_size == -1 || max_size != size || *offset_p < 0)
1054 return false;
1055
1056 if (DECL_P (base))
1057 {
1058 int index = ipa_get_param_decl_index_1 (descriptors, base);
1059 if (index >= 0
1060 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1061 {
1062 *index_p = index;
1063 *by_ref_p = false;
1064 if (size_p)
1065 *size_p = size;
1066 return true;
1067 }
1068 return false;
1069 }
1070
1071 if (TREE_CODE (base) != MEM_REF
1072 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1073 || !integer_zerop (TREE_OPERAND (base, 1)))
1074 return false;
1075
1076 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1077 {
1078 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1079 index = ipa_get_param_decl_index_1 (descriptors, parm);
1080 }
1081 else
1082 {
1083 /* This branch catches situations where a pointer parameter is not a
1084 gimple register, for example:
1085
1086 void hip7(S*) (struct S * p)
1087 {
1088 void (*<T2e4>) (struct S *) D.1867;
1089 struct S * p.1;
1090
1091 <bb 2>:
1092 p.1_1 = p;
1093 D.1867_2 = p.1_1->f;
1094 D.1867_2 ();
1095 gdp = &p;
1096 */
1097
1098 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1099 index = load_from_unmodified_param (fbi, descriptors, def);
1100 }
1101
1102 if (index >= 0
1103 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1104 {
1105 *index_p = index;
1106 *by_ref_p = true;
1107 if (size_p)
1108 *size_p = size;
1109 return true;
1110 }
1111 return false;
1112 }
1113
1114 /* Just like the previous function, only without the func_body_info
1115 pointer, for users outside of this file. */
1116
1117 bool
1118 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1119 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1120 bool *by_ref_p)
1121 {
1122 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1123 offset_p, NULL, by_ref_p);
1124 }
1125
1126 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1127 of an assignment statement STMT, try to determine whether we are actually
1128 handling any of the following cases and construct an appropriate jump
1129 function into JFUNC if so:
1130
1131 1) The passed value is loaded from a formal parameter which is not a gimple
1132 register (most probably because it is addressable, the value has to be
1133 scalar) and we can guarantee the value has not changed. This case can
1134 therefore be described by a simple pass-through jump function. For example:
1135
1136 foo (int a)
1137 {
1138 int a.0;
1139
1140 a.0_2 = a;
1141 bar (a.0_2);
1142
1143 2) The passed value can be described by a simple arithmetic pass-through
1144 jump function. E.g.
1145
1146 foo (int a)
1147 {
1148 int D.2064;
1149
1150 D.2064_4 = a.1(D) + 4;
1151 bar (D.2064_4);
1152
1153 This case can also occur in combination of the previous one, e.g.:
1154
1155 foo (int a, int z)
1156 {
1157 int a.0;
1158 int D.2064;
1159
1160 a.0_3 = a;
1161 D.2064_4 = a.0_3 + 4;
1162 foo (D.2064_4);
1163
1164 3) The passed value is an address of an object within another one (which
1165 also passed by reference). Such situations are described by an ancestor
1166 jump function and describe situations such as:
1167
1168 B::foo() (struct B * const this)
1169 {
1170 struct A * D.1845;
1171
1172 D.1845_2 = &this_1(D)->D.1748;
1173 A::bar (D.1845_2);
1174
1175 INFO is the structure describing individual parameters across different
1176 stages of IPA optimizations. FBI contains the information that is
1177 only needed for intraprocedural analysis. */
1178
1179 static void
1180 compute_complex_assign_jump_func (struct func_body_info *fbi,
1181 struct ipa_node_params *info,
1182 struct ipa_jump_func *jfunc,
1183 gcall *call, gimple stmt, tree name,
1184 tree param_type)
1185 {
1186 HOST_WIDE_INT offset, size, max_size;
1187 tree op1, tc_ssa, base, ssa;
1188 int index;
1189
1190 op1 = gimple_assign_rhs1 (stmt);
1191
1192 if (TREE_CODE (op1) == SSA_NAME)
1193 {
1194 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1195 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1196 else
1197 index = load_from_unmodified_param (fbi, info->descriptors,
1198 SSA_NAME_DEF_STMT (op1));
1199 tc_ssa = op1;
1200 }
1201 else
1202 {
1203 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1204 tc_ssa = gimple_assign_lhs (stmt);
1205 }
1206
1207 if (index >= 0)
1208 {
1209 tree op2 = gimple_assign_rhs2 (stmt);
1210
1211 if (op2)
1212 {
1213 if (!is_gimple_ip_invariant (op2)
1214 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1215 && !useless_type_conversion_p (TREE_TYPE (name),
1216 TREE_TYPE (op1))))
1217 return;
1218
1219 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1220 gimple_assign_rhs_code (stmt));
1221 }
1222 else if (gimple_assign_single_p (stmt))
1223 {
1224 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1225 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1226 }
1227 return;
1228 }
1229
1230 if (TREE_CODE (op1) != ADDR_EXPR)
1231 return;
1232 op1 = TREE_OPERAND (op1, 0);
1233 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1234 return;
1235 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1236 if (TREE_CODE (base) != MEM_REF
1237 /* If this is a varying address, punt. */
1238 || max_size == -1
1239 || max_size != size)
1240 return;
1241 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1242 ssa = TREE_OPERAND (base, 0);
1243 if (TREE_CODE (ssa) != SSA_NAME
1244 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1245 || offset < 0)
1246 return;
1247
1248 /* Dynamic types are changed in constructors and destructors. */
1249 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1250 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1251 ipa_set_ancestor_jf (jfunc, offset, index,
1252 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1253 }
1254
1255 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1256 it looks like:
1257
1258 iftmp.1_3 = &obj_2(D)->D.1762;
1259
1260 The base of the MEM_REF must be a default definition SSA NAME of a
1261 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1262 whole MEM_REF expression is returned and the offset calculated from any
1263 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1264 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1265
1266 static tree
1267 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1268 {
1269 HOST_WIDE_INT size, max_size;
1270 tree expr, parm, obj;
1271
1272 if (!gimple_assign_single_p (assign))
1273 return NULL_TREE;
1274 expr = gimple_assign_rhs1 (assign);
1275
1276 if (TREE_CODE (expr) != ADDR_EXPR)
1277 return NULL_TREE;
1278 expr = TREE_OPERAND (expr, 0);
1279 obj = expr;
1280 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1281
1282 if (TREE_CODE (expr) != MEM_REF
1283 /* If this is a varying address, punt. */
1284 || max_size == -1
1285 || max_size != size
1286 || *offset < 0)
1287 return NULL_TREE;
1288 parm = TREE_OPERAND (expr, 0);
1289 if (TREE_CODE (parm) != SSA_NAME
1290 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1291 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1292 return NULL_TREE;
1293
1294 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1295 *obj_p = obj;
1296 return expr;
1297 }
1298
1299
1300 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1301 statement PHI, try to find out whether NAME is in fact a
1302 multiple-inheritance typecast from a descendant into an ancestor of a formal
1303 parameter and thus can be described by an ancestor jump function and if so,
1304 write the appropriate function into JFUNC.
1305
1306 Essentially we want to match the following pattern:
1307
1308 if (obj_2(D) != 0B)
1309 goto <bb 3>;
1310 else
1311 goto <bb 4>;
1312
1313 <bb 3>:
1314 iftmp.1_3 = &obj_2(D)->D.1762;
1315
1316 <bb 4>:
1317 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1318 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1319 return D.1879_6; */
1320
1321 static void
1322 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1323 struct ipa_node_params *info,
1324 struct ipa_jump_func *jfunc,
1325 gcall *call, gphi *phi)
1326 {
1327 HOST_WIDE_INT offset;
1328 gimple assign, cond;
1329 basic_block phi_bb, assign_bb, cond_bb;
1330 tree tmp, parm, expr, obj;
1331 int index, i;
1332
1333 if (gimple_phi_num_args (phi) != 2)
1334 return;
1335
1336 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1337 tmp = PHI_ARG_DEF (phi, 0);
1338 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1339 tmp = PHI_ARG_DEF (phi, 1);
1340 else
1341 return;
1342 if (TREE_CODE (tmp) != SSA_NAME
1343 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1344 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1345 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1346 return;
1347
1348 assign = SSA_NAME_DEF_STMT (tmp);
1349 assign_bb = gimple_bb (assign);
1350 if (!single_pred_p (assign_bb))
1351 return;
1352 expr = get_ancestor_addr_info (assign, &obj, &offset);
1353 if (!expr)
1354 return;
1355 parm = TREE_OPERAND (expr, 0);
1356 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1357 if (index < 0)
1358 return;
1359
1360 cond_bb = single_pred (assign_bb);
1361 cond = last_stmt (cond_bb);
1362 if (!cond
1363 || gimple_code (cond) != GIMPLE_COND
1364 || gimple_cond_code (cond) != NE_EXPR
1365 || gimple_cond_lhs (cond) != parm
1366 || !integer_zerop (gimple_cond_rhs (cond)))
1367 return;
1368
1369 phi_bb = gimple_bb (phi);
1370 for (i = 0; i < 2; i++)
1371 {
1372 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1373 if (pred != assign_bb && pred != cond_bb)
1374 return;
1375 }
1376
1377 ipa_set_ancestor_jf (jfunc, offset, index,
1378 parm_ref_data_pass_through_p (fbi, index, call, parm));
1379 }
1380
1381 /* Inspect the given TYPE and return true iff it has the same structure (the
1382 same number of fields of the same types) as a C++ member pointer. If
1383 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1384 corresponding fields there. */
1385
1386 static bool
1387 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1388 {
1389 tree fld;
1390
1391 if (TREE_CODE (type) != RECORD_TYPE)
1392 return false;
1393
1394 fld = TYPE_FIELDS (type);
1395 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1396 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1397 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1398 return false;
1399
1400 if (method_ptr)
1401 *method_ptr = fld;
1402
1403 fld = DECL_CHAIN (fld);
1404 if (!fld || INTEGRAL_TYPE_P (fld)
1405 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1406 return false;
1407 if (delta)
1408 *delta = fld;
1409
1410 if (DECL_CHAIN (fld))
1411 return false;
1412
1413 return true;
1414 }
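
/* For instance (an illustrative sketch): the internal representation of a
   C++ pointer to member function is a record along the lines of

     struct { void (*__pfn) (A *); long __delta; };

   i.e. the first field is a pointer to a METHOD_TYPE and the second and
   last field is integral, which is exactly what the predicate above
   checks for.  */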
1415
1416 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1417 return the rhs of its defining statement. Otherwise return RHS as it
1418 is. */
1419
1420 static inline tree
1421 get_ssa_def_if_simple_copy (tree rhs)
1422 {
1423 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1424 {
1425 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1426
1427 if (gimple_assign_single_p (def_stmt))
1428 rhs = gimple_assign_rhs1 (def_stmt);
1429 else
1430 break;
1431 }
1432 return rhs;
1433 }
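
/* E.g. for a copy chain like

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the single-copy definitions
   back and returns a_1(D), stopping at the default definition.  */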
1434
1435 /* Simple linked list, describing known contents of an aggregate before a
1436 call. */
1437
1438 struct ipa_known_agg_contents_list
1439 {
1440 /* Offset and size of the described part of the aggregate. */
1441 HOST_WIDE_INT offset, size;
1442 /* Known constant value or NULL if the contents are known to be unknown. */
1443 tree constant;
1444 /* Pointer to the next structure in the list. */
1445 struct ipa_known_agg_contents_list *next;
1446 };
1447
1448 /* Find the proper place in linked list of ipa_known_agg_contents_list
1449 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1450 unless there is a partial overlap, in which case return NULL, or such
1451 element is already there, in which case set *ALREADY_THERE to true. */
1452
1453 static struct ipa_known_agg_contents_list **
1454 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1455 HOST_WIDE_INT lhs_offset,
1456 HOST_WIDE_INT lhs_size,
1457 bool *already_there)
1458 {
1459 struct ipa_known_agg_contents_list **p = list;
1460 while (*p && (*p)->offset < lhs_offset)
1461 {
1462 if ((*p)->offset + (*p)->size > lhs_offset)
1463 return NULL;
1464 p = &(*p)->next;
1465 }
1466
1467 if (*p && (*p)->offset < lhs_offset + lhs_size)
1468 {
1469 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1470 /* We already know this value is subsequently overwritten with
1471 something else. */
1472 *already_there = true;
1473 else
1474 /* Otherwise this is a partial overlap which we cannot
1475 represent. */
1476 return NULL;
1477 }
1478 return p;
1479 }
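
/* For example (illustrative offsets and sizes in bits): with existing
   entries <0, 32> and <64, 32>, a request for <32, 32> returns the link
   between them, a request for <0, 32> sets *ALREADY_THERE, and a request
   for <16, 32> returns NULL because it partially overlaps <0, 32>.  */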
1480
1481 /* Build aggregate jump function from LIST, assuming there are exactly
1482 CONST_COUNT constant entries there and that th offset of the passed argument
1483 is ARG_OFFSET and store it into JFUNC. */
1484
1485 static void
1486 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1487 int const_count, HOST_WIDE_INT arg_offset,
1488 struct ipa_jump_func *jfunc)
1489 {
1490 vec_alloc (jfunc->agg.items, const_count);
1491 while (list)
1492 {
1493 if (list->constant)
1494 {
1495 struct ipa_agg_jf_item item;
1496 item.offset = list->offset - arg_offset;
1497 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1498 item.value = unshare_expr_without_location (list->constant);
1499 jfunc->agg.items->quick_push (item);
1500 }
1501 list = list->next;
1502 }
1503 }
1504
1505 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1506 in ARG is filled in with constant values. ARG can either be an aggregate
1507 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1508 aggregate. JFUNC is the jump function into which the constants are
1509 subsequently stored. */
1510
1511 static void
1512 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1513 tree arg_type,
1514 struct ipa_jump_func *jfunc)
1515 {
1516 struct ipa_known_agg_contents_list *list = NULL;
1517 int item_count = 0, const_count = 0;
1518 HOST_WIDE_INT arg_offset, arg_size;
1519 gimple_stmt_iterator gsi;
1520 tree arg_base;
1521 bool check_ref, by_ref;
1522 ao_ref r;
1523
1524 /* The function operates in three stages. First, we prepare check_ref, r,
1525 arg_base and arg_offset based on what is actually passed as an actual
1526 argument. */
1527
1528 if (POINTER_TYPE_P (arg_type))
1529 {
1530 by_ref = true;
1531 if (TREE_CODE (arg) == SSA_NAME)
1532 {
1533 tree type_size;
1534 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1535 return;
1536 check_ref = true;
1537 arg_base = arg;
1538 arg_offset = 0;
1539 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1540 arg_size = tree_to_uhwi (type_size);
1541 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1542 }
1543 else if (TREE_CODE (arg) == ADDR_EXPR)
1544 {
1545 HOST_WIDE_INT arg_max_size;
1546
1547 arg = TREE_OPERAND (arg, 0);
1548 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1549 &arg_max_size);
1550 if (arg_max_size == -1
1551 || arg_max_size != arg_size
1552 || arg_offset < 0)
1553 return;
1554 if (DECL_P (arg_base))
1555 {
1556 check_ref = false;
1557 ao_ref_init (&r, arg_base);
1558 }
1559 else
1560 return;
1561 }
1562 else
1563 return;
1564 }
1565 else
1566 {
1567 HOST_WIDE_INT arg_max_size;
1568
1569 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1570
1571 by_ref = false;
1572 check_ref = false;
1573 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1574 &arg_max_size);
1575 if (arg_max_size == -1
1576 || arg_max_size != arg_size
1577 || arg_offset < 0)
1578 return;
1579
1580 ao_ref_init (&r, arg);
1581 }
1582
1583 /* Second stage walks back the BB, looks at individual statements and as long
1584 as it is confident of how the statements affect contents of the
1585 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1586 describing it. */
1587 gsi = gsi_for_stmt (call);
1588 gsi_prev (&gsi);
1589 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1590 {
1591 struct ipa_known_agg_contents_list *n, **p;
1592 gimple stmt = gsi_stmt (gsi);
1593 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1594 tree lhs, rhs, lhs_base;
1595
1596 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1597 continue;
1598 if (!gimple_assign_single_p (stmt))
1599 break;
1600
1601 lhs = gimple_assign_lhs (stmt);
1602 rhs = gimple_assign_rhs1 (stmt);
1603 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1604 || TREE_CODE (lhs) == BIT_FIELD_REF
1605 || contains_bitfld_component_ref_p (lhs))
1606 break;
1607
1608 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1609 &lhs_max_size);
1610 if (lhs_max_size == -1
1611 || lhs_max_size != lhs_size)
1612 break;
1613
1614 if (check_ref)
1615 {
1616 if (TREE_CODE (lhs_base) != MEM_REF
1617 || TREE_OPERAND (lhs_base, 0) != arg_base
1618 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1619 break;
1620 }
1621 else if (lhs_base != arg_base)
1622 {
1623 if (DECL_P (lhs_base))
1624 continue;
1625 else
1626 break;
1627 }
1628
1629 bool already_there = false;
1630 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1631 &already_there);
1632 if (!p)
1633 break;
1634 if (already_there)
1635 continue;
1636
1637 rhs = get_ssa_def_if_simple_copy (rhs);
1638 n = XALLOCA (struct ipa_known_agg_contents_list);
1639 n->size = lhs_size;
1640 n->offset = lhs_offset;
1641 if (is_gimple_ip_invariant (rhs))
1642 {
1643 n->constant = rhs;
1644 const_count++;
1645 }
1646 else
1647 n->constant = NULL_TREE;
1648 n->next = *p;
1649 *p = n;
1650
1651 item_count++;
1652 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1653 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1654 break;
1655 }
1656
1657 /* Third stage just goes over the list and creates an appropriate vector of
1658 ipa_agg_jf_item structures out of it, of course only if there are
1659 any known constants to begin with. */
1660
1661 if (const_count)
1662 {
1663 jfunc->agg.by_ref = by_ref;
1664 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1665 }
1666 }
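
/* As an illustration (a sketch with made-up names): for a caller like

     struct S { int a, b; };
     void caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       consume (&s);
     }

   the backward walk from the call records the two stores, and the
   resulting aggregate jump function describes a by-reference aggregate
   with constants 1 and 2 at the corresponding offsets.  */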
1667
1668 static tree
1669 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1670 {
1671 int n;
1672 tree type = (e->callee
1673 ? TREE_TYPE (e->callee->decl)
1674 : gimple_call_fntype (e->call_stmt));
1675 tree t = TYPE_ARG_TYPES (type);
1676
1677 for (n = 0; n < i; n++)
1678 {
1679 if (!t)
1680 break;
1681 t = TREE_CHAIN (t);
1682 }
1683 if (t)
1684 return TREE_VALUE (t);
1685 if (!e->callee)
1686 return NULL;
1687 t = DECL_ARGUMENTS (e->callee->decl);
1688 for (n = 0; n < i; n++)
1689 {
1690 if (!t)
1691 return NULL;
1692 t = TREE_CHAIN (t);
1693 }
1694 if (t)
1695 return TREE_TYPE (t);
1696 return NULL;
1697 }
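
/* E.g. for a callee declared 'void f (int, double)', calling the above
   with I == 1 walks TYPE_ARG_TYPES and returns the tree for double; for
   K&R-style declarations without a prototype it falls back to the types
   of DECL_ARGUMENTS.  */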
1698
1699 /* Compute jump function for all arguments of callsite CS and insert the
1700 information in the jump_functions array in the ipa_edge_args corresponding
1701 to this callsite. */
1702
1703 static void
1704 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1705 struct cgraph_edge *cs)
1706 {
1707 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1708 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1709 gcall *call = cs->call_stmt;
1710 int n, arg_num = gimple_call_num_args (call);
1711 bool useful_context = false;
1712
1713 if (arg_num == 0 || args->jump_functions)
1714 return;
1715 vec_safe_grow_cleared (args->jump_functions, arg_num);
1716 if (flag_devirtualize)
1717 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1718
1719 if (gimple_call_internal_p (call))
1720 return;
1721 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1722 return;
1723
1724 for (n = 0; n < arg_num; n++)
1725 {
1726 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1727 tree arg = gimple_call_arg (call, n);
1728 tree param_type = ipa_get_callee_param_type (cs, n);
1729 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1730 {
1731 tree instance;
1732 struct ipa_polymorphic_call_context context (cs->caller->decl,
1733 arg, cs->call_stmt,
1734 &instance);
1735 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1736 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1737 if (!context.useless_p ())
1738 useful_context = true;
1739 }
1740
1741 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1742 {
1743 unsigned HOST_WIDE_INT hwi_bitpos;
1744 unsigned align;
1745
1746 if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
1747 && align % BITS_PER_UNIT == 0
1748 && hwi_bitpos % BITS_PER_UNIT == 0)
1749 {
1750 jfunc->alignment.known = true;
1751 jfunc->alignment.align = align / BITS_PER_UNIT;
1752 jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
1753 }
1754 else
1755 gcc_assert (!jfunc->alignment.known);
1756 }
1757 else
1758 gcc_assert (!jfunc->alignment.known);
1759
1760 if (is_gimple_ip_invariant (arg))
1761 ipa_set_jf_constant (jfunc, arg, cs);
1762 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1763 && TREE_CODE (arg) == PARM_DECL)
1764 {
1765 int index = ipa_get_param_decl_index (info, arg);
1766
1767 gcc_assert (index >= 0);
1768 /* Aggregate passed by value, check for pass-through, otherwise we
1769 will attempt to fill in aggregate contents later in this
1770 loop. */
1771 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1772 {
1773 ipa_set_jf_simple_pass_through (jfunc, index, false);
1774 continue;
1775 }
1776 }
1777 else if (TREE_CODE (arg) == SSA_NAME)
1778 {
1779 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1780 {
1781 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1782 if (index >= 0)
1783 {
1784 bool agg_p;
1785 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1786 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1787 }
1788 }
1789 else
1790 {
1791 gimple stmt = SSA_NAME_DEF_STMT (arg);
1792 if (is_gimple_assign (stmt))
1793 compute_complex_assign_jump_func (fbi, info, jfunc,
1794 call, stmt, arg, param_type);
1795 else if (gimple_code (stmt) == GIMPLE_PHI)
1796 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1797 call,
1798 as_a <gphi *> (stmt));
1799 }
1800 }
1801
1802 /* If ARG is a pointer, we cannot use its type to determine the type of the
1803 aggregate passed (because type conversions are ignored in gimple). Usually
1804 we can safely get the type from the function declaration, but in case of K&R
1805 prototypes or variadic functions we can try our luck with the type of the
1806 pointer passed.
1807 TODO: Since we look for actual initialization of the memory object, we may
1808 better work out the type based on the memory stores we find. */
1808 if (!param_type)
1809 param_type = TREE_TYPE (arg);
1810
1811 if ((jfunc->type != IPA_JF_PASS_THROUGH
1812 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1813 && (jfunc->type != IPA_JF_ANCESTOR
1814 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1815 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1816 || POINTER_TYPE_P (param_type)))
1817 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1818 }
1819 if (!useful_context)
1820 vec_free (args->polymorphic_call_contexts);
1821 }
1822
1823 /* Compute jump functions for all edges - both direct and indirect - outgoing
1824 from BB. */
1825
1826 static void
1827 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1828 {
1829 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1830 int i;
1831 struct cgraph_edge *cs;
1832
1833 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1834 {
1835 struct cgraph_node *callee = cs->callee;
1836
1837 if (callee)
1838 {
1839 callee->ultimate_alias_target ();
1840 /* We do not need to bother analyzing calls to unknown functions
1841 unless they may become known during LTO/WHOPR. */
1842 if (!callee->definition && !flag_lto)
1843 continue;
1844 }
1845 ipa_compute_jump_functions_for_edge (fbi, cs);
1846 }
1847 }
1848
1849 /* If STMT looks like a statement loading a value from a member pointer formal
1850 parameter, return that parameter and store the offset of the field to
1851 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1852 might be clobbered). If USE_DELTA, then we look for a use of the delta
1853 field rather than the pfn. */
1854
1855 static tree
1856 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1857 HOST_WIDE_INT *offset_p)
1858 {
1859 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1860
1861 if (!gimple_assign_single_p (stmt))
1862 return NULL_TREE;
1863
1864 rhs = gimple_assign_rhs1 (stmt);
1865 if (TREE_CODE (rhs) == COMPONENT_REF)
1866 {
1867 ref_field = TREE_OPERAND (rhs, 1);
1868 rhs = TREE_OPERAND (rhs, 0);
1869 }
1870 else
1871 ref_field = NULL_TREE;
1872 if (TREE_CODE (rhs) != MEM_REF)
1873 return NULL_TREE;
1874 rec = TREE_OPERAND (rhs, 0);
1875 if (TREE_CODE (rec) != ADDR_EXPR)
1876 return NULL_TREE;
1877 rec = TREE_OPERAND (rec, 0);
1878 if (TREE_CODE (rec) != PARM_DECL
1879 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1880 return NULL_TREE;
1881 ref_offset = TREE_OPERAND (rhs, 1);
1882
1883 if (use_delta)
1884 fld = delta_field;
1885 else
1886 fld = ptr_field;
1887 if (offset_p)
1888 *offset_p = int_bit_position (fld);
1889
1890 if (ref_field)
1891 {
1892 if (integer_nonzerop (ref_offset))
1893 return NULL_TREE;
1894 return ref_field == fld ? rec : NULL_TREE;
1895 }
1896 else
1897 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1898 : NULL_TREE;
1899 }
1900
1901 /* Returns true iff T is an SSA_NAME defined by a statement. */
1902
1903 static bool
1904 ipa_is_ssa_with_stmt_def (tree t)
1905 {
1906 if (TREE_CODE (t) == SSA_NAME
1907 && !SSA_NAME_IS_DEFAULT_DEF (t))
1908 return true;
1909 else
1910 return false;
1911 }
1912
1913 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1914 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1915 indirect call graph edge. */
1916
1917 static struct cgraph_edge *
1918 ipa_note_param_call (struct cgraph_node *node, int param_index,
1919 gcall *stmt)
1920 {
1921 struct cgraph_edge *cs;
1922
1923 cs = node->get_edge (stmt);
1924 cs->indirect_info->param_index = param_index;
1925 cs->indirect_info->agg_contents = 0;
1926 cs->indirect_info->member_ptr = 0;
1927 return cs;
1928 }
1929
1930 /* Analyze the CALL and examine uses of formal parameters of the caller
1931 FBI->node (described by FBI->info). Currently it checks whether the call
1932 calls a pointer that is a formal parameter and if so, the parameter is
1933 marked with the called flag and an indirect call graph edge describing
1934 the call is created.
1935 This is very simple for ordinary pointers
1936 represented in SSA but not-so-nice when it comes to member pointers. The
1937 ugly part of this function does nothing more than trying to match the
1938 pattern of such a call. An example of such a pattern is the gimple dump
1939 below, the call is on the last line:
1940
1941 <bb 2>:
1942 f$__delta_5 = f.__delta;
1943 f$__pfn_24 = f.__pfn;
1944
1945 or
1946 <bb 2>:
1947 f$__delta_5 = MEM[(struct *)&f];
1948 f$__pfn_24 = MEM[(struct *)&f + 4B];
1949
1950 and a few lines below:
1951
1952 <bb 5>
1953 D.2496_3 = (int) f$__pfn_24;
1954 D.2497_4 = D.2496_3 & 1;
1955 if (D.2497_4 != 0)
1956 goto <bb 3>;
1957 else
1958 goto <bb 4>;
1959
1960 <bb 6>:
1961 D.2500_7 = (unsigned int) f$__delta_5;
1962 D.2501_8 = &S + D.2500_7;
1963 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1964 D.2503_10 = *D.2502_9;
1965 D.2504_12 = f$__pfn_24 + -1;
1966 D.2505_13 = (unsigned int) D.2504_12;
1967 D.2506_14 = D.2503_10 + D.2505_13;
1968 D.2507_15 = *D.2506_14;
1969 iftmp.11_16 = (String:: *) D.2507_15;
1970
1971 <bb 7>:
1972 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1973 D.2500_19 = (unsigned int) f$__delta_5;
1974 D.2508_20 = &S + D.2500_19;
1975 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1976
1977 Such patterns are results of simple calls to a member pointer:
1978
1979 int doprinting (int (MyString::* f)(int) const)
1980 {
1981 MyString S ("somestring");
1982
1983 return (S.*f)(4);
1984 }
1985
1986 Moreover, the function also looks for called pointers loaded from aggregates
1987 passed by value or reference. */
1988
1989 static void
1990 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
1991 tree target)
1992 {
1993 struct ipa_node_params *info = fbi->info;
1994 HOST_WIDE_INT offset;
1995 bool by_ref;
1996
1997 if (SSA_NAME_IS_DEFAULT_DEF (target))
1998 {
1999 tree var = SSA_NAME_VAR (target);
2000 int index = ipa_get_param_decl_index (info, var);
2001 if (index >= 0)
2002 ipa_note_param_call (fbi->node, index, call);
2003 return;
2004 }
2005
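/* See whether the called pointer was loaded from an aggregate that is a
parameter or is pointed to by a parameter; if so, record which parameter
and at what offset the load happened. */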
2006 int index;
2007 gimple def = SSA_NAME_DEF_STMT (target);
2008 if (gimple_assign_single_p (def)
2009 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2010 gimple_assign_rhs1 (def), &index, &offset,
2011 NULL, &by_ref))
2012 {
2013 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2014 cs->indirect_info->offset = offset;
2015 cs->indirect_info->agg_contents = 1;
2016 cs->indirect_info->by_ref = by_ref;
2017 return;
2018 }
2019
2020 /* Now we need to try to match the complex pattern of calling a member
2021 pointer. */
2022 if (gimple_code (def) != GIMPLE_PHI
2023 || gimple_phi_num_args (def) != 2
2024 || !POINTER_TYPE_P (TREE_TYPE (target))
2025 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2026 return;
2027
2028 /* First, we need to check whether one of these is a load from a member
2029 pointer that is a parameter to this function. */
2030 tree n1 = PHI_ARG_DEF (def, 0);
2031 tree n2 = PHI_ARG_DEF (def, 1);
2032 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2033 return;
2034 gimple d1 = SSA_NAME_DEF_STMT (n1);
2035 gimple d2 = SSA_NAME_DEF_STMT (n2);
2036
2037 tree rec;
2038 basic_block bb, virt_bb;
2039 basic_block join = gimple_bb (def);
2040 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2041 {
2042 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2043 return;
2044
2045 bb = EDGE_PRED (join, 0)->src;
2046 virt_bb = gimple_bb (d2);
2047 }
2048 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2049 {
2050 bb = EDGE_PRED (join, 1)->src;
2051 virt_bb = gimple_bb (d1);
2052 }
2053 else
2054 return;
2055
2056 /* Second, we need to check that the basic blocks are laid out in the way
2057 corresponding to the pattern. */
2058
2059 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2060 || single_pred (virt_bb) != bb
2061 || single_succ (virt_bb) != join)
2062 return;
2063
2064 /* Third, let's see that the branching is done depending on the least
2065 significant bit of the pfn. */
2066
2067 gimple branch = last_stmt (bb);
2068 if (!branch || gimple_code (branch) != GIMPLE_COND)
2069 return;
2070
2071 if ((gimple_cond_code (branch) != NE_EXPR
2072 && gimple_cond_code (branch) != EQ_EXPR)
2073 || !integer_zerop (gimple_cond_rhs (branch)))
2074 return;
2075
2076 tree cond = gimple_cond_lhs (branch);
2077 if (!ipa_is_ssa_with_stmt_def (cond))
2078 return;
2079
2080 def = SSA_NAME_DEF_STMT (cond);
2081 if (!is_gimple_assign (def)
2082 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2083 || !integer_onep (gimple_assign_rhs2 (def)))
2084 return;
2085
2086 cond = gimple_assign_rhs1 (def);
2087 if (!ipa_is_ssa_with_stmt_def (cond))
2088 return;
2089
2090 def = SSA_NAME_DEF_STMT (cond);
2091
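/* The pfn component may have been converted to an integer before the bit
test; if so, look through the conversion. */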
2092 if (is_gimple_assign (def)
2093 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2094 {
2095 cond = gimple_assign_rhs1 (def);
2096 if (!ipa_is_ssa_with_stmt_def (cond))
2097 return;
2098 def = SSA_NAME_DEF_STMT (cond);
2099 }
2100
2101 tree rec2;
2102 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2103 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2104 == ptrmemfunc_vbit_in_delta),
2105 NULL);
2106 if (rec != rec2)
2107 return;
2108
2109 index = ipa_get_param_decl_index (info, rec);
2110 if (index >= 0
2111 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2112 {
2113 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2114 cs->indirect_info->offset = offset;
2115 cs->indirect_info->agg_contents = 1;
2116 cs->indirect_info->member_ptr = 1;
2117 }
2118
2119 return;
2120 }
2121
2122 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2123 object referenced in the expression is a formal parameter of the caller
2124 FBI->node (described by FBI->info), create a call note for the
2125 statement. */
2126
2127 static void
2128 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2129 gcall *call, tree target)
2130 {
2131 tree obj = OBJ_TYPE_REF_OBJECT (target);
2132 int index;
2133 HOST_WIDE_INT anc_offset;
2134
2135 if (!flag_devirtualize)
2136 return;
2137
2138 if (TREE_CODE (obj) != SSA_NAME)
2139 return;
2140
2141 struct ipa_node_params *info = fbi->info;
2142 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2143 {
2144 struct ipa_jump_func jfunc;
2145 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2146 return;
2147
2148 anc_offset = 0;
2149 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2150 gcc_assert (index >= 0);
2151 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2152 call, &jfunc))
2153 return;
2154 }
2155 else
2156 {
2157 struct ipa_jump_func jfunc;
2158 gimple stmt = SSA_NAME_DEF_STMT (obj);
2159 tree expr;
2160
2161 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2162 if (!expr)
2163 return;
2164 index = ipa_get_param_decl_index (info,
2165 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2166 gcc_assert (index >= 0);
2167 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2168 call, &jfunc, anc_offset))
2169 return;
2170 }
2171
2172 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2173 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2174 ii->offset = anc_offset;
2175 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2176 ii->otr_type = obj_type_ref_class (target);
2177 ii->polymorphic = 1;
2178 }
2179
2180 /* Analyze whether and how the call statement CALL utilizes formal
2181 parameters of the caller (described by FBI->info). FBI also holds
2182 intermediate information about each formal parameter. */
2183
2184 static void
2185 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2186 {
2187 tree target = gimple_call_fn (call);
2188
2189 if (!target
2190 || (TREE_CODE (target) != SSA_NAME
2191 && !virtual_method_call_p (target)))
2192 return;
2193
2194 struct cgraph_edge *cs = fbi->node->get_edge (call);
2195 /* If we previously turned the call into a direct call, there is
2196 no need to analyze. */
2197 if (cs && !cs->indirect_unknown_callee)
2198 return;
2199
2200 if (cs->indirect_info->polymorphic && flag_devirtualize)
2201 {
2202 tree instance;
2203 tree target = gimple_call_fn (call);
2204 ipa_polymorphic_call_context context (current_function_decl,
2205 target, call, &instance);
2206
2207 gcc_checking_assert (cs->indirect_info->otr_type
2208 == obj_type_ref_class (target));
2209 gcc_checking_assert (cs->indirect_info->otr_token
2210 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2211
2212 cs->indirect_info->vptr_changed
2213 = !context.get_dynamic_type (instance,
2214 OBJ_TYPE_REF_OBJECT (target),
2215 obj_type_ref_class (target), call);
2216 cs->indirect_info->context = context;
2217 }
2218
2219 if (TREE_CODE (target) == SSA_NAME)
2220 ipa_analyze_indirect_call_uses (fbi, call, target);
2221 else if (virtual_method_call_p (target))
2222 ipa_analyze_virtual_call_uses (fbi, call, target);
2223 }
2224
2225
2226 /* Analyze the call statement STMT with respect to formal parameters
2227 (described in FBI->info) of the caller given by FBI->node. Currently it
2228 only checks whether formal parameters are called. */
2229
2230 static void
2231 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2232 {
2233 if (is_gimple_call (stmt))
2234 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2235 }
2236
2237 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2238 If OP is a parameter declaration, mark it as used in the info structure
2239 passed in DATA. */
2240
2241 static bool
2242 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2243 {
2244 struct ipa_node_params *info = (struct ipa_node_params *) data;
2245
2246 op = get_base_address (op);
2247 if (op
2248 && TREE_CODE (op) == PARM_DECL)
2249 {
2250 int index = ipa_get_param_decl_index (info, op);
2251 gcc_assert (index >= 0);
2252 ipa_set_param_used (info, index, true);
2253 }
2254
2255 return false;
2256 }
2257
2258 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2259 the findings in various fields of the associated ipa_node_params
2260 structure, such as parameter flags and notes. FBI holds various data about
2261 the function being analyzed. */
2262
2263 static void
2264 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2265 {
2266 gimple_stmt_iterator gsi;
2267 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2268 {
2269 gimple stmt = gsi_stmt (gsi);
2270
2271 if (is_gimple_debug (stmt))
2272 continue;
2273
2274 ipa_analyze_stmt_uses (fbi, stmt);
2275 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2276 visit_ref_for_mod_analysis,
2277 visit_ref_for_mod_analysis,
2278 visit_ref_for_mod_analysis);
2279 }
2280 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2281 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2282 visit_ref_for_mod_analysis,
2283 visit_ref_for_mod_analysis,
2284 visit_ref_for_mod_analysis);
2285 }
2286
2287 /* Calculate controlled uses of parameters of NODE. */
2288
2289 static void
2290 ipa_analyze_controlled_uses (struct cgraph_node *node)
2291 {
2292 struct ipa_node_params *info = IPA_NODE_REF (node);
2293
2294 for (int i = 0; i < ipa_get_param_count (info); i++)
2295 {
2296 tree parm = ipa_get_param (info, i);
2297 int controlled_uses = 0;
2298
2299 /* For SSA regs see if parameter is used. For non-SSA we compute
2300 the flag during modification analysis. */
2301 if (is_gimple_reg (parm))
2302 {
2303 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2304 parm);
2305 if (ddef && !has_zero_uses (ddef))
2306 {
2307 imm_use_iterator imm_iter;
2308 use_operand_p use_p;
2309
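/* Uses that are call arguments are counted; any other non-debug use
makes the number of controlled uses unknown. */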
2310 ipa_set_param_used (info, i, true);
2311 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2312 if (!is_gimple_call (USE_STMT (use_p)))
2313 {
2314 if (!is_gimple_debug (USE_STMT (use_p)))
2315 {
2316 controlled_uses = IPA_UNDESCRIBED_USE;
2317 break;
2318 }
2319 }
2320 else
2321 controlled_uses++;
2322 }
2323 else
2324 controlled_uses = 0;
2325 }
2326 else
2327 controlled_uses = IPA_UNDESCRIBED_USE;
2328 ipa_set_controlled_uses (info, i, controlled_uses);
2329 }
2330 }
2331
2332 /* Free stuff in BI. */
2333
2334 static void
2335 free_ipa_bb_info (struct ipa_bb_info *bi)
2336 {
2337 bi->cg_edges.release ();
2338 bi->param_aa_statuses.release ();
2339 }
2340
2341 /* Dominator walker driving the analysis. */
2342
2343 class analysis_dom_walker : public dom_walker
2344 {
2345 public:
2346 analysis_dom_walker (struct func_body_info *fbi)
2347 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2348
2349 virtual void before_dom_children (basic_block);
2350
2351 private:
2352 struct func_body_info *m_fbi;
2353 };
2354
2355 void
2356 analysis_dom_walker::before_dom_children (basic_block bb)
2357 {
2358 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2359 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2360 }
2361
2362 /* Initialize the array describing properties of formal parameters
2363 of NODE, analyze their uses and compute jump functions associated
2364 with actual arguments of calls from within NODE. */
2365
2366 void
2367 ipa_analyze_node (struct cgraph_node *node)
2368 {
2369 struct func_body_info fbi;
2370 struct ipa_node_params *info;
2371
2372 ipa_check_create_node_params ();
2373 ipa_check_create_edge_args ();
2374 info = IPA_NODE_REF (node);
2375
2376 if (info->analysis_done)
2377 return;
2378 info->analysis_done = 1;
2379
2380 if (ipa_func_spec_opts_forbid_analysis_p (node))
2381 {
2382 for (int i = 0; i < ipa_get_param_count (info); i++)
2383 {
2384 ipa_set_param_used (info, i, true);
2385 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2386 }
2387 return;
2388 }
2389
2390 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2391 push_cfun (func);
2392 calculate_dominance_info (CDI_DOMINATORS);
2393 ipa_initialize_node_params (node);
2394 ipa_analyze_controlled_uses (node);
2395
2396 fbi.node = node;
2397 fbi.info = IPA_NODE_REF (node);
2398 fbi.bb_infos = vNULL;
2399 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2400 fbi.param_count = ipa_get_param_count (info);
2401 fbi.aa_walked = 0;
2402
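/* Distribute the outgoing call graph edges among the infos of the basic
blocks that contain their call statements. */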
2403 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2404 {
2405 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2406 bi->cg_edges.safe_push (cs);
2407 }
2408
2409 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2410 {
2411 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2412 bi->cg_edges.safe_push (cs);
2413 }
2414
2415 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2416
2417 int i;
2418 struct ipa_bb_info *bi;
2419 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2420 free_ipa_bb_info (bi);
2421 fbi.bb_infos.release ();
2422 free_dominance_info (CDI_DOMINATORS);
2423 pop_cfun ();
2424 }
2425
2426 /* Update the jump functions associated with call graph edge E when the call
2427 graph edge CS is being inlined, assuming that E->caller is already (possibly
2428 indirectly) inlined into CS->callee and that E has not been inlined. */
2429
2430 static void
2431 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2432 struct cgraph_edge *e)
2433 {
2434 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2435 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2436 int count = ipa_get_cs_argument_count (args);
2437 int i;
2438
2439 for (i = 0; i < count; i++)
2440 {
2441 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2442 struct ipa_polymorphic_call_context *dst_ctx
2443 = ipa_get_ith_polymorhic_call_context (args, i);
2444
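/* DST describes an argument of edge E in terms of the formal parameters of
CS->callee; compose it with the jump functions of the inlined edge CS so
that it refers to the parameters of CS->caller instead. */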
2445 if (dst->type == IPA_JF_ANCESTOR)
2446 {
2447 struct ipa_jump_func *src;
2448 int dst_fid = dst->value.ancestor.formal_id;
2449 struct ipa_polymorphic_call_context *src_ctx
2450 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2451
2452 /* A variable number of arguments can cause havoc if we try to access
2453 an argument that does not exist on the inlined edge, so make sure we
2454 don't. */
2455 if (dst_fid >= ipa_get_cs_argument_count (top))
2456 {
2457 ipa_set_jf_unknown (dst);
2458 continue;
2459 }
2460
2461 src = ipa_get_ith_jump_func (top, dst_fid);
2462
2463 if (src_ctx && !src_ctx->useless_p ())
2464 {
2465 struct ipa_polymorphic_call_context ctx = *src_ctx;
2466
2467 /* TODO: Make type preserved safe WRT contexts. */
2468 if (!ipa_get_jf_ancestor_type_preserved (dst))
2469 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2470 ctx.offset_by (dst->value.ancestor.offset);
2471 if (!ctx.useless_p ())
2472 {
2473 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2474 count);
2475 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2476 }
2477 dst_ctx->combine_with (ctx);
2478 }
2479
2480 if (src->agg.items
2481 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2482 {
2483 struct ipa_agg_jf_item *item;
2484 int j;
2485
2486 /* Currently we do not produce clobber aggregate jump functions,
2487 replace with merging when we do. */
2488 gcc_assert (!dst->agg.items);
2489
2490 dst->agg.items = vec_safe_copy (src->agg.items);
2491 dst->agg.by_ref = src->agg.by_ref;
2492 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2493 item->offset -= dst->value.ancestor.offset;
2494 }
2495
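/* Compose the ancestor jump function with the one describing the
corresponding argument of the inlined call: a simple pass-through merely
re-bases the ancestor onto another formal parameter, another ancestor
also adds its offset. */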
2496 if (src->type == IPA_JF_PASS_THROUGH
2497 && src->value.pass_through.operation == NOP_EXPR)
2498 {
2499 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2500 dst->value.ancestor.agg_preserved &=
2501 src->value.pass_through.agg_preserved;
2502 }
2503 else if (src->type == IPA_JF_ANCESTOR)
2504 {
2505 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2506 dst->value.ancestor.offset += src->value.ancestor.offset;
2507 dst->value.ancestor.agg_preserved &=
2508 src->value.ancestor.agg_preserved;
2509 }
2510 else
2511 ipa_set_jf_unknown (dst);
2512 }
2513 else if (dst->type == IPA_JF_PASS_THROUGH)
2514 {
2515 struct ipa_jump_func *src;
2516 /* We must check the range due to calls with a variable number of
2517 arguments, and we cannot combine jump functions with operations. */
2518 if (dst->value.pass_through.operation == NOP_EXPR
2519 && (dst->value.pass_through.formal_id
2520 < ipa_get_cs_argument_count (top)))
2521 {
2522 int dst_fid = dst->value.pass_through.formal_id;
2523 src = ipa_get_ith_jump_func (top, dst_fid);
2524 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2525 struct ipa_polymorphic_call_context *src_ctx
2526 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2527
2528 if (src_ctx && !src_ctx->useless_p ())
2529 {
2530 struct ipa_polymorphic_call_context ctx = *src_ctx;
2531
2532 /* TODO: Make type preserved safe WRT contexts. */
2533 if (!ipa_get_jf_pass_through_type_preserved (dst))
2534 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2535 if (!ctx.useless_p ())
2536 {
2537 if (!dst_ctx)
2538 {
2539 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2540 count);
2541 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2542 }
2543 dst_ctx->combine_with (ctx);
2544 }
2545 }
2546 switch (src->type)
2547 {
2548 case IPA_JF_UNKNOWN:
2549 ipa_set_jf_unknown (dst);
2550 break;
2551 case IPA_JF_CONST:
2552 ipa_set_jf_cst_copy (dst, src);
2553 break;
2554
2555 case IPA_JF_PASS_THROUGH:
2556 {
2557 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2558 enum tree_code operation;
2559 operation = ipa_get_jf_pass_through_operation (src);
2560
2561 if (operation == NOP_EXPR)
2562 {
2563 bool agg_p;
2564 agg_p = dst_agg_p
2565 && ipa_get_jf_pass_through_agg_preserved (src);
2566 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2567 }
2568 else
2569 {
2570 tree operand = ipa_get_jf_pass_through_operand (src);
2571 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2572 operation);
2573 }
2574 break;
2575 }
2576 case IPA_JF_ANCESTOR:
2577 {
2578 bool agg_p;
2579 agg_p = dst_agg_p
2580 && ipa_get_jf_ancestor_agg_preserved (src);
2581 ipa_set_ancestor_jf (dst,
2582 ipa_get_jf_ancestor_offset (src),
2583 ipa_get_jf_ancestor_formal_id (src),
2584 agg_p);
2585 break;
2586 }
2587 default:
2588 gcc_unreachable ();
2589 }
2590
2591 if (src->agg.items
2592 && (dst_agg_p || !src->agg.by_ref))
2593 {
2594 /* Currently we do not produce clobber aggregate jump
2595 functions, replace with merging when we do. */
2596 gcc_assert (!dst->agg.items);
2597
2598 dst->agg.by_ref = src->agg.by_ref;
2599 dst->agg.items = vec_safe_copy (src->agg.items);
2600 }
2601 }
2602 else
2603 ipa_set_jf_unknown (dst);
2604 }
2605 }
2606 }
2607
2608 /* If TARGET is an addr_expr of a function declaration, make it the
2609 (speculative, if SPECULATIVE is set) destination of an indirect edge IE
2610 and return the edge. Otherwise, return NULL. */
2611
2612 struct cgraph_edge *
2613 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2614 bool speculative)
2615 {
2616 struct cgraph_node *callee;
2617 struct inline_edge_summary *es = inline_edge_summary (ie);
2618 bool unreachable = false;
2619
2620 if (TREE_CODE (target) == ADDR_EXPR)
2621 target = TREE_OPERAND (target, 0);
2622 if (TREE_CODE (target) != FUNCTION_DECL)
2623 {
2624 target = canonicalize_constructor_val (target, NULL);
2625 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2626 {
2627 /* Member pointer call that goes through a VMT lookup. */
2628 if (ie->indirect_info->member_ptr
2629 /* Or if target is not an invariant expression and we do not
2630 know if it will evaluate to a function at runtime.
2631 This can happen when folding through &VAR, where &VAR
2632 is IP invariant, but VAR itself is not.
2633
2634 TODO: Revisit this when GCC 5 is branched. It seems that
2635 the member_ptr check is not needed and that we may try to fold
2636 the expression and see if VAR is readonly. */
2637 || !is_gimple_ip_invariant (target))
2638 {
2639 if (dump_enabled_p ())
2640 {
2641 location_t loc = gimple_location_safe (ie->call_stmt);
2642 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2643 "discovered direct call non-invariant "
2644 "%s/%i\n",
2645 ie->caller->name (), ie->caller->order);
2646 }
2647 return NULL;
2648 }
2649
2650
2651 if (dump_enabled_p ())
2652 {
2653 location_t loc = gimple_location_safe (ie->call_stmt);
2654 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2655 "discovered direct call to non-function in %s/%i, "
2656 "making it __builtin_unreachable\n",
2657 ie->caller->name (), ie->caller->order);
2658 }
2659
2660 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2661 callee = cgraph_node::get_create (target);
2662 unreachable = true;
2663 }
2664 else
2665 callee = cgraph_node::get (target);
2666 }
2667 else
2668 callee = cgraph_node::get (target);
2669
2670 /* Because may-edges are not explicitly represented and the vtable may be
2671 external, we may create the first reference to the object in the unit. */
2672 if (!callee || callee->global.inlined_to)
2673 {
2674
2675 /* We had better ensure we can refer to it.
2676 In the case of static functions we are out of luck, since we have
2677 already removed their bodies. In the case of public functions we may
2678 or may not introduce the reference. */
2679 if (!canonicalize_constructor_val (target, NULL)
2680 || !TREE_PUBLIC (target))
2681 {
2682 if (dump_file)
2683 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2684 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2685 xstrdup_for_dump (ie->caller->name ()),
2686 ie->caller->order,
2687 xstrdup_for_dump (ie->callee->name ()),
2688 ie->callee->order);
2689 return NULL;
2690 }
2691 callee = cgraph_node::get_create (target);
2692 }
2693
2694 /* If the edge is already speculated, check whether the new target agrees. */
2695 if (speculative && ie->speculative)
2696 {
2697 struct cgraph_edge *e2;
2698 struct ipa_ref *ref;
2699 ie->speculative_call_info (e2, ie, ref);
2700 if (e2->callee->ultimate_alias_target ()
2701 != callee->ultimate_alias_target ())
2702 {
2703 if (dump_file)
2704 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2705 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2706 xstrdup_for_dump (ie->caller->name ()),
2707 ie->caller->order,
2708 xstrdup_for_dump (callee->name ()),
2709 callee->order,
2710 xstrdup_for_dump (e2->callee->name ()),
2711 e2->callee->order);
2712 }
2713 else
2714 {
2715 if (dump_file)
2716 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2717 "(%s/%i -> %s/%i); this agrees with the previous speculation.\n",
2718 xstrdup_for_dump (ie->caller->name ()),
2719 ie->caller->order,
2720 xstrdup_for_dump (callee->name ()),
2721 callee->order);
2722 }
2723 return NULL;
2724 }
2725
2726 if (!dbg_cnt (devirt))
2727 return NULL;
2728
2729 ipa_check_create_node_params ();
2730
2731 /* We cannot make edges to inline clones. It is a bug if someone removed
2732 the cgraph node too early. */
2733 gcc_assert (!callee->global.inlined_to);
2734
2735 if (dump_file && !unreachable)
2736 {
2737 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2738 "(%s/%i -> %s/%i), for stmt ",
2739 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2740 speculative ? "speculative" : "known",
2741 xstrdup_for_dump (ie->caller->name ()),
2742 ie->caller->order,
2743 xstrdup_for_dump (callee->name ()),
2744 callee->order);
2745 if (ie->call_stmt)
2746 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2747 else
2748 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2749 }
2750 if (dump_enabled_p ())
2751 {
2752 location_t loc = gimple_location_safe (ie->call_stmt);
2753
2754 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2755 "converting indirect call in %s to direct call to %s\n",
2756 ie->caller->name (), callee->name ());
2757 }
2758 if (!speculative)
2759 {
2760 struct cgraph_edge *orig = ie;
2761 ie = ie->make_direct (callee);
2762 /* If we resolved speculative edge the cost is already up to date
2763 for direct call (adjusted by inline_edge_duplication_hook). */
2764 if (ie == orig)
2765 {
2766 es = inline_edge_summary (ie);
2767 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2768 - eni_size_weights.call_cost);
2769 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2770 - eni_time_weights.call_cost);
2771 }
2772 }
2773 else
2774 {
2775 if (!callee->can_be_discarded_p ())
2776 {
2777 cgraph_node *alias;
2778 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2779 if (alias)
2780 callee = alias;
2781 }
2782 /* make_speculative will update IE's cost to the direct call cost. */
2783 ie = ie->make_speculative
2784 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2785 }
2786
2787 return ie;
2788 }
2789
2790 /* Retrieve the value from the aggregate jump function AGG for the given
2791 OFFSET, or return NULL if there is none. BY_REF specifies whether the
2792 value has to be passed by reference or by value. */
2793
2794 tree
2795 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2796 HOST_WIDE_INT offset, bool by_ref)
2797 {
2798 struct ipa_agg_jf_item *item;
2799 int i;
2800
2801 if (by_ref != agg->by_ref)
2802 return NULL;
2803
2804 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2805 if (item->offset == offset)
2806 {
2807 /* Currently we do not have clobber values, return NULL for them once
2808 we do. */
2809 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2810 return item->value;
2811 }
2812 return NULL;
2813 }
2814
2815 /* Remove a reference to SYMBOL from the list of references of a node given by
2816 reference description RDESC. Return true if the reference has been
2817 successfully found and removed. */
2818
2819 static bool
2820 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2821 {
2822 struct ipa_ref *to_del;
2823 struct cgraph_edge *origin;
2824
2825 origin = rdesc->cs;
2826 if (!origin)
2827 return false;
2828 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2829 origin->lto_stmt_uid);
2830 if (!to_del)
2831 return false;
2832
2833 to_del->remove_reference ();
2834 if (dump_file)
2835 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2836 xstrdup_for_dump (origin->caller->name ()),
2837 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2838 return true;
2839 }
2840
2841 /* If JFUNC has a reference description with refcount different from
2842 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2843 NULL. JFUNC must be a constant jump function. */
2844
2845 static struct ipa_cst_ref_desc *
2846 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2847 {
2848 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2849 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2850 return rdesc;
2851 else
2852 return NULL;
2853 }
2854
2855 /* If the value of constant jump function JFUNC is an address of a function
2856 declaration, return the associated call graph node. Otherwise return
2857 NULL. */
2858
2859 static cgraph_node *
2860 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2861 {
2862 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2863 tree cst = ipa_get_jf_constant (jfunc);
2864 if (TREE_CODE (cst) != ADDR_EXPR
2865 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2866 return NULL;
2867
2868 return cgraph_node::get (TREE_OPERAND (cst, 0));
2869 }
2870
2871
2872 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2873 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2874 the edge specified in the rdesc. Return false if either the symbol or the
2875 reference could not be found, otherwise return true. */
2876
2877 static bool
2878 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2879 {
2880 struct ipa_cst_ref_desc *rdesc;
2881 if (jfunc->type == IPA_JF_CONST
2882 && (rdesc = jfunc_rdesc_usable (jfunc))
2883 && --rdesc->refcount == 0)
2884 {
2885 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2886 if (!symbol)
2887 return false;
2888
2889 return remove_described_reference (symbol, rdesc);
2890 }
2891 return true;
2892 }
2893
2894 /* Try to find a destination for indirect edge IE that corresponds to a simple
2895 call or a call of a member function pointer and where the destination is a
2896 pointer formal parameter described by jump function JFUNC. If it can be
2897 determined, return the newly direct edge, otherwise return NULL.
2898 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2899
2900 static struct cgraph_edge *
2901 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2902 struct ipa_jump_func *jfunc,
2903 struct ipa_node_params *new_root_info)
2904 {
2905 struct cgraph_edge *cs;
2906 tree target;
2907 bool agg_contents = ie->indirect_info->agg_contents;
2908
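/* The target is either a constant stored in an aggregate at a known offset,
or directly the value the jump function describes. */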
2909 if (ie->indirect_info->agg_contents)
2910 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2911 ie->indirect_info->offset,
2912 ie->indirect_info->by_ref);
2913 else
2914 target = ipa_value_from_jfunc (new_root_info, jfunc);
2915 if (!target)
2916 return NULL;
2917 cs = ipa_make_edge_direct_to_target (ie, target);
2918
2919 if (cs && !agg_contents)
2920 {
2921 bool ok;
2922 gcc_checking_assert (cs->callee
2923 && (cs != ie
2924 || jfunc->type != IPA_JF_CONST
2925 || !cgraph_node_for_jfunc (jfunc)
2926 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2927 ok = try_decrement_rdesc_refcount (jfunc);
2928 gcc_checking_assert (ok);
2929 }
2930
2931 return cs;
2932 }
2933
2934 /* Return the target to be used in cases of impossible devirtualization. IE
2935 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
2936
2937 tree
2938 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2939 {
2940 if (dump_file)
2941 {
2942 if (target)
2943 fprintf (dump_file,
2944 "Type inconsistent devirtualization: %s/%i->%s\n",
2945 ie->caller->name (), ie->caller->order,
2946 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2947 else
2948 fprintf (dump_file,
2949 "No devirtualization target in %s/%i\n",
2950 ie->caller->name (), ie->caller->order);
2951 }
2952 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2953 cgraph_node::get_create (new_target);
2954 return new_target;
2955 }
2956
2957 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2958 call based on a formal parameter which is described by jump function JFUNC
2959 and if it can be determined, make it direct and return the direct edge.
2960 Otherwise, return NULL. CTX describes the polymorphic context that the
2961 parameter the call is based on brings along with it. */
2962
2963 static struct cgraph_edge *
2964 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2965 struct ipa_jump_func *jfunc,
2966 struct ipa_polymorphic_call_context ctx)
2967 {
2968 tree target = NULL;
2969 bool speculative = false;
2970
2971 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2972 return NULL;
2973
2974 gcc_assert (!ie->indirect_info->by_ref);
2975
2976 /* Try to do lookup via known virtual table pointer value. */
2977 if (!ie->indirect_info->vptr_changed
2978 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2979 {
2980 tree vtable;
2981 unsigned HOST_WIDE_INT offset;
2982 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2983 ie->indirect_info->offset,
2984 true);
2985 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2986 {
2987 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2988 vtable, offset);
2989 if (t)
2990 {
2991 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2992 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2993 || !possible_polymorphic_call_target_p
2994 (ie, cgraph_node::get (t)))
2995 {
2996 /* Do not speculate builtin_unreachable, it is stupid! */
2997 if (!ie->indirect_info->vptr_changed)
2998 target = ipa_impossible_devirt_target (ie, target);
2999 }
3000 else
3001 {
3002 target = t;
3003 speculative = ie->indirect_info->vptr_changed;
3004 }
3005 }
3006 }
3007 }
3008
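/* If the vtable lookup did not settle things, try to work out the target
from the polymorphic call context. */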
3009 ipa_polymorphic_call_context ie_context (ie);
3010 vec <cgraph_node *> targets;
3011 bool final;
3012
3013 ctx.offset_by (ie->indirect_info->offset);
3014 if (ie->indirect_info->vptr_changed)
3015 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3016 ie->indirect_info->otr_type);
3017 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3018 targets = possible_polymorphic_call_targets
3019 (ie->indirect_info->otr_type,
3020 ie->indirect_info->otr_token,
3021 ctx, &final);
3022 if (final && targets.length () <= 1)
3023 {
3024 speculative = false;
3025 if (targets.length () == 1)
3026 target = targets[0]->decl;
3027 else
3028 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3029 }
3030 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3031 && !ie->speculative && ie->maybe_hot_p ())
3032 {
3033 cgraph_node *n;
3034 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3035 ie->indirect_info->otr_token,
3036 ie->indirect_info->context);
3037 if (n)
3038 {
3039 target = n->decl;
3040 speculative = true;
3041 }
3042 }
3043
3044 if (target)
3045 {
3046 if (!possible_polymorphic_call_target_p
3047 (ie, cgraph_node::get_create (target)))
3048 {
3049 if (speculative)
3050 return NULL;
3051 target = ipa_impossible_devirt_target (ie, target);
3052 }
3053 return ipa_make_edge_direct_to_target (ie, target, speculative);
3054 }
3055 else
3056 return NULL;
3057 }
3058
3059 /* Update the param called notes associated with NODE when CS is being inlined,
3060 assuming NODE is (potentially indirectly) inlined into CS->callee.
3061 Moreover, if the callee is discovered to be constant, create a new cgraph
3062 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3063 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3064
3065 static bool
3066 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3067 struct cgraph_node *node,
3068 vec<cgraph_edge *> *new_edges)
3069 {
3070 struct ipa_edge_args *top;
3071 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3072 struct ipa_node_params *new_root_info;
3073 bool res = false;
3074
3075 ipa_check_create_edge_args ();
3076 top = IPA_EDGE_REF (cs);
3077 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3078 ? cs->caller->global.inlined_to
3079 : cs->caller);
3080
3081 for (ie = node->indirect_calls; ie; ie = next_ie)
3082 {
3083 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3084 struct ipa_jump_func *jfunc;
3085 int param_index;
3086 cgraph_node *spec_target = NULL;
3087
3088 next_ie = ie->next_callee;
3089
3090 if (ici->param_index == -1)
3091 continue;
3092
3093 /* We must check the range due to calls with a variable number of arguments. */
3094 if (ici->param_index >= ipa_get_cs_argument_count (top))
3095 {
3096 ici->param_index = -1;
3097 continue;
3098 }
3099
3100 param_index = ici->param_index;
3101 jfunc = ipa_get_ith_jump_func (top, param_index);
3102
3103 if (ie->speculative)
3104 {
3105 struct cgraph_edge *de;
3106 struct ipa_ref *ref;
3107 ie->speculative_call_info (de, ie, ref);
3108 spec_target = de->callee;
3109 }
3110
3111 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3112 new_direct_edge = NULL;
3113 else if (ici->polymorphic)
3114 {
3115 ipa_polymorphic_call_context ctx;
3116 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3117 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3118 }
3119 else
3120 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3121 new_root_info);
3122 /* If speculation was removed, then we need to do nothing. */
3123 if (new_direct_edge && new_direct_edge != ie
3124 && new_direct_edge->callee == spec_target)
3125 {
3126 new_direct_edge->indirect_inlining_edge = 1;
3127 top = IPA_EDGE_REF (cs);
3128 res = true;
3129 if (!new_direct_edge->speculative)
3130 continue;
3131 }
3132 else if (new_direct_edge)
3133 {
3134 new_direct_edge->indirect_inlining_edge = 1;
3135 if (new_direct_edge->call_stmt)
3136 new_direct_edge->call_stmt_cannot_inline_p
3137 = !gimple_check_call_matching_types (
3138 new_direct_edge->call_stmt,
3139 new_direct_edge->callee->decl, false);
3140 if (new_edges)
3141 {
3142 new_edges->safe_push (new_direct_edge);
3143 res = true;
3144 }
3145 top = IPA_EDGE_REF (cs);
3146 /* If speculative edge was introduced we still need to update
3147 call info of the indirect edge. */
3148 if (!new_direct_edge->speculative)
3149 continue;
3150 }
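/* The call remains indirect; re-map its parameter index through the jump
function of the inlined call site so that it refers to a parameter of the
new root of the inline tree, or mark it as untrackable. */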
3151 if (jfunc->type == IPA_JF_PASS_THROUGH
3152 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3153 {
3154 if (ici->agg_contents
3155 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3156 && !ici->polymorphic)
3157 ici->param_index = -1;
3158 else
3159 {
3160 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3161 if (ici->polymorphic
3162 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3163 ici->vptr_changed = true;
3164 }
3165 }
3166 else if (jfunc->type == IPA_JF_ANCESTOR)
3167 {
3168 if (ici->agg_contents
3169 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3170 && !ici->polymorphic)
3171 ici->param_index = -1;
3172 else
3173 {
3174 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3175 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3176 if (ici->polymorphic
3177 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3178 ici->vptr_changed = true;
3179 }
3180 }
3181 else
3182 /* Either we can find a destination for this edge now or never. */
3183 ici->param_index = -1;
3184 }
3185
3186 return res;
3187 }
3188
3189 /* Recursively traverse subtree of NODE (including node) made of inlined
3190 cgraph_edges when CS has been inlined and invoke
3191 update_indirect_edges_after_inlining on all nodes and
3192 update_jump_functions_after_inlining on all non-inlined edges that lead out
3193 of this subtree. Newly discovered indirect edges will be added to
3194 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3195 created. */
3196
3197 static bool
3198 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3199 struct cgraph_node *node,
3200 vec<cgraph_edge *> *new_edges)
3201 {
3202 struct cgraph_edge *e;
3203 bool res;
3204
3205 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3206
3207 for (e = node->callees; e; e = e->next_callee)
3208 if (!e->inline_failed)
3209 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3210 else
3211 update_jump_functions_after_inlining (cs, e);
3212 for (e = node->indirect_calls; e; e = e->next_callee)
3213 update_jump_functions_after_inlining (cs, e);
3214
3215 return res;
3216 }
3217
3218 /* Combine two controlled uses counts as done during inlining. */
3219
3220 static int
3221 combine_controlled_uses_counters (int c, int d)
3222 {
3223 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3224 return IPA_UNDESCRIBED_USE;
3225 else
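/* One of the uses in the caller was the argument at the call site being
inlined; it is now replaced by the uses inside the callee, hence the -1. */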
3226 return c + d - 1;
3227 }
3228
3229 /* Propagate the number of controlled uses from CS->callee to the new root
3230 of the tree of inlined nodes. */
3231
3232 static void
3233 propagate_controlled_uses (struct cgraph_edge *cs)
3234 {
3235 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3236 struct cgraph_node *new_root = cs->caller->global.inlined_to
3237 ? cs->caller->global.inlined_to : cs->caller;
3238 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3239 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3240 int count, i;
3241
3242 count = MIN (ipa_get_cs_argument_count (args),
3243 ipa_get_param_count (old_root_info));
3244 for (i = 0; i < count; i++)
3245 {
3246 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3247 struct ipa_cst_ref_desc *rdesc;
3248
3249 if (jf->type == IPA_JF_PASS_THROUGH)
3250 {
3251 int src_idx, c, d;
3252 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3253 c = ipa_get_controlled_uses (new_root_info, src_idx);
3254 d = ipa_get_controlled_uses (old_root_info, i);
3255
3256 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3257 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3258 c = combine_controlled_uses_counters (c, d);
3259 ipa_set_controlled_uses (new_root_info, src_idx, c);
3260 if (c == 0 && new_root_info->ipcp_orig_node)
3261 {
3262 struct cgraph_node *n;
3263 struct ipa_ref *ref;
3264 tree t = new_root_info->known_csts[src_idx];
3265
3266 if (t && TREE_CODE (t) == ADDR_EXPR
3267 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3268 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3269 && (ref = new_root->find_reference (n, NULL, 0)))
3270 {
3271 if (dump_file)
3272 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3273 "reference from %s/%i to %s/%i.\n",
3274 xstrdup_for_dump (new_root->name ()),
3275 new_root->order,
3276 xstrdup_for_dump (n->name ()), n->order);
3277 ref->remove_reference ();
3278 }
3279 }
3280 }
3281 else if (jf->type == IPA_JF_CONST
3282 && (rdesc = jfunc_rdesc_usable (jf)))
3283 {
3284 int d = ipa_get_controlled_uses (old_root_info, i);
3285 int c = rdesc->refcount;
3286 rdesc->refcount = combine_controlled_uses_counters (c, d);
3287 if (rdesc->refcount == 0)
3288 {
3289 tree cst = ipa_get_jf_constant (jf);
3290 struct cgraph_node *n;
3291 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3292 && TREE_CODE (TREE_OPERAND (cst, 0))
3293 == FUNCTION_DECL);
3294 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3295 if (n)
3296 {
3297 struct cgraph_node *clone;
3298 bool ok;
3299 ok = remove_described_reference (n, rdesc);
3300 gcc_checking_assert (ok);
3301
3302 clone = cs->caller;
3303 while (clone->global.inlined_to
3304 && clone != rdesc->cs->caller
3305 && IPA_NODE_REF (clone)->ipcp_orig_node)
3306 {
3307 struct ipa_ref *ref;
3308 ref = clone->find_reference (n, NULL, 0);
3309 if (ref)
3310 {
3311 if (dump_file)
3312 fprintf (dump_file, "ipa-prop: Removing "
3313 "cloning-created reference "
3314 "from %s/%i to %s/%i.\n",
3315 xstrdup_for_dump (clone->name ()),
3316 clone->order,
3317 xstrdup_for_dump (n->name ()),
3318 n->order);
3319 ref->remove_reference ();
3320 }
3321 clone = clone->callers->caller;
3322 }
3323 }
3324 }
3325 }
3326 }
3327
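/* Arguments in excess of the formal parameters of the old root cannot be
described; mark their uses as unknown. */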
3328 for (i = ipa_get_param_count (old_root_info);
3329 i < ipa_get_cs_argument_count (args);
3330 i++)
3331 {
3332 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3333
3334 if (jf->type == IPA_JF_CONST)
3335 {
3336 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3337 if (rdesc)
3338 rdesc->refcount = IPA_UNDESCRIBED_USE;
3339 }
3340 else if (jf->type == IPA_JF_PASS_THROUGH)
3341 ipa_set_controlled_uses (new_root_info,
3342 jf->value.pass_through.formal_id,
3343 IPA_UNDESCRIBED_USE);
3344 }
3345 }
3346
3347 /* Update jump functions and call note functions on inlining the call site CS.
3348 CS is expected to lead to a node already cloned by
3349 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3350 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3351 created. */
3352
3353 bool
3354 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3355 vec<cgraph_edge *> *new_edges)
3356 {
3357 bool changed;
3358 /* Do nothing if the preparation phase has not been carried out yet
3359 (i.e. during early inlining). */
3360 if (!ipa_node_params_sum)
3361 return false;
3362 gcc_assert (ipa_edge_args_vector);
3363
3364 propagate_controlled_uses (cs);
3365 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3366
3367 return changed;
3368 }
3369
3370 /* Frees all dynamically allocated structures that the argument info points
3371 to. */
3372
3373 void
3374 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3375 {
3376 vec_free (args->jump_functions);
3377 memset (args, 0, sizeof (*args));
3378 }
3379
3380 /* Free all ipa_edge structures. */
3381
3382 void
3383 ipa_free_all_edge_args (void)
3384 {
3385 int i;
3386 struct ipa_edge_args *args;
3387
3388 if (!ipa_edge_args_vector)
3389 return;
3390
3391 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3392 ipa_free_edge_args_substructures (args);
3393
3394 vec_free (ipa_edge_args_vector);
3395 }
3396
3397 /* Frees all dynamically allocated structures that the param info points
3398 to. */
3399
3400 ipa_node_params::~ipa_node_params ()
3401 {
3402 descriptors.release ();
3403 free (lattices);
3404 /* Lattice values and their sources are deallocated with their allocation
3405 pool. */
3406 known_contexts.release ();
3407
3408 lattices = NULL;
3409 ipcp_orig_node = NULL;
3410 analysis_done = 0;
3411 node_enqueued = 0;
3412 do_clone_for_all_contexts = 0;
3413 is_all_contexts_clone = 0;
3414 node_dead = 0;
3415 }
3416
3417 /* Free all ipa_node_params structures. */
3418
3419 void
3420 ipa_free_all_node_params (void)
3421 {
3422 delete ipa_node_params_sum;
3423 ipa_node_params_sum = NULL;
3424 }
3425
3426 /* Grow ipcp_transformations if necessary. */
3427
3428 void
3429 ipcp_grow_transformations_if_necessary (void)
3430 {
3431 if (vec_safe_length (ipcp_transformations)
3432 <= (unsigned) symtab->cgraph_max_uid)
3433 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3434 }
3435
3436 /* Set the aggregate replacements of NODE to be AGGVALS. */
3437
3438 void
3439 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3440 struct ipa_agg_replacement_value *aggvals)
3441 {
3442 ipcp_grow_transformations_if_necessary ();
3443 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3444 }
3445
3446 /* Hook that is called by cgraph.c when an edge is removed. */
3447
3448 static void
3449 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3450 {
3451 struct ipa_edge_args *args;
3452
3453 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
3454 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3455 return;
3456
3457 args = IPA_EDGE_REF (cs);
3458 if (args->jump_functions)
3459 {
3460 struct ipa_jump_func *jf;
3461 int i;
3462 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3463 {
3464 struct ipa_cst_ref_desc *rdesc;
3465 try_decrement_rdesc_refcount (jf);
3466 if (jf->type == IPA_JF_CONST
3467 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3468 && rdesc->cs == cs)
3469 rdesc->cs = NULL;
3470 }
3471 }
3472
3473 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3474 }
3475
3476 /* Hook that is called by cgraph.c when an edge is duplicated. */
3477
3478 static void
3479 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3480 void *)
3481 {
3482 struct ipa_edge_args *old_args, *new_args;
3483 unsigned int i;
3484
3485 ipa_check_create_edge_args ();
3486
3487 old_args = IPA_EDGE_REF (src);
3488 new_args = IPA_EDGE_REF (dst);
3489
3490 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3491 if (old_args->polymorphic_call_contexts)
3492 new_args->polymorphic_call_contexts
3493 = vec_safe_copy (old_args->polymorphic_call_contexts);
3494
3495 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3496 {
3497 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3498 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3499
3500 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3501
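/* Constant jump functions may carry a reference description that has to be
duplicated or re-linked, depending on whether the edge is being cloned
within the same caller or into a tree of inline clones. */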
3502 if (src_jf->type == IPA_JF_CONST)
3503 {
3504 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3505
3506 if (!src_rdesc)
3507 dst_jf->value.constant.rdesc = NULL;
3508 else if (src->caller == dst->caller)
3509 {
3510 struct ipa_ref *ref;
3511 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3512 gcc_checking_assert (n);
3513 ref = src->caller->find_reference (n, src->call_stmt,
3514 src->lto_stmt_uid);
3515 gcc_checking_assert (ref);
3516 dst->caller->clone_reference (ref, ref->stmt);
3517
3518 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3519 dst_rdesc->cs = dst;
3520 dst_rdesc->refcount = src_rdesc->refcount;
3521 dst_rdesc->next_duplicate = NULL;
3522 dst_jf->value.constant.rdesc = dst_rdesc;
3523 }
3524 else if (src_rdesc->cs == src)
3525 {
3526 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3527 dst_rdesc->cs = dst;
3528 dst_rdesc->refcount = src_rdesc->refcount;
3529 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3530 src_rdesc->next_duplicate = dst_rdesc;
3531 dst_jf->value.constant.rdesc = dst_rdesc;
3532 }
3533 else
3534 {
3535 struct ipa_cst_ref_desc *dst_rdesc;
3536 /* This can happen during inlining, when a JFUNC can refer to a
3537 reference taken in a function up in the tree of inline clones.
3538 We need to find the duplicate that refers to our tree of
3539 inline clones. */
3540
3541 gcc_assert (dst->caller->global.inlined_to);
3542 for (dst_rdesc = src_rdesc->next_duplicate;
3543 dst_rdesc;
3544 dst_rdesc = dst_rdesc->next_duplicate)
3545 {
3546 struct cgraph_node *top;
3547 top = dst_rdesc->cs->caller->global.inlined_to
3548 ? dst_rdesc->cs->caller->global.inlined_to
3549 : dst_rdesc->cs->caller;
3550 if (dst->caller->global.inlined_to == top)
3551 break;
3552 }
3553 gcc_assert (dst_rdesc);
3554 dst_jf->value.constant.rdesc = dst_rdesc;
3555 }
3556 }
3557 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3558 && src->caller == dst->caller)
3559 {
3560 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3561 ? dst->caller->global.inlined_to : dst->caller;
3562 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3563 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3564
3565 int c = ipa_get_controlled_uses (root_info, idx);
3566 if (c != IPA_UNDESCRIBED_USE)
3567 {
3568 c++;
3569 ipa_set_controlled_uses (root_info, idx, c);
3570 }
3571 }
3572 }
3573 }
3574
3575 /* Analyze a function newly added to the callgraph. */
3576
3577 static void
3578 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3579 {
3580 if (node->has_gimple_body_p ())
3581 ipa_analyze_node (node);
3582 }
3583
3584 /* Hook that is called by summary when a node is duplicated. */
3585
3586 void
3587 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3588 ipa_node_params *old_info,
3589 ipa_node_params *new_info)
3590 {
3591 ipa_agg_replacement_value *old_av, *new_av;
3592
3593 new_info->descriptors = old_info->descriptors.copy ();
3594 new_info->lattices = NULL;
3595 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3596
3597 new_info->analysis_done = old_info->analysis_done;
3598 new_info->node_enqueued = old_info->node_enqueued;
3599
3600 old_av = ipa_get_agg_replacements_for_node (src);
3601 if (old_av)
3602 {
3603 new_av = NULL;
3604 while (old_av)
3605 {
3606 struct ipa_agg_replacement_value *v;
3607
3608 v = ggc_alloc<ipa_agg_replacement_value> ();
3609 memcpy (v, old_av, sizeof (*v));
3610 v->next = new_av;
3611 new_av = v;
3612 old_av = old_av->next;
3613 }
3614 ipa_set_node_agg_value_chain (dst, new_av);
3615 }
3616
3617 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3618
3619 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3620 {
3621 ipcp_grow_transformations_if_necessary ();
3622 src_trans = ipcp_get_transformation_summary (src);
3623 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3624 vec<ipa_alignment, va_gc> *&dst_alignments
3625 = ipcp_get_transformation_summary (dst)->alignments;
3626 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3627 for (unsigned i = 0; i < src_alignments->length (); ++i)
3628 dst_alignments->quick_push ((*src_alignments)[i]);
3629 }
3630 }
3631
3632 /* Register our cgraph hooks if they are not already there. */
3633
3634 void
3635 ipa_register_cgraph_hooks (void)
3636 {
3637 ipa_check_create_node_params ();
3638
3639 if (!edge_removal_hook_holder)
3640 edge_removal_hook_holder =
3641 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3642 if (!edge_duplication_hook_holder)
3643 edge_duplication_hook_holder =
3644 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3645 function_insertion_hook_holder =
3646 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3647 }
3648
3649 /* Unregister our cgraph hooks. */
3650
3651 static void
3652 ipa_unregister_cgraph_hooks (void)
3653 {
3654 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3655 edge_removal_hook_holder = NULL;
3656 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3657 edge_duplication_hook_holder = NULL;
3658 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3659 function_insertion_hook_holder = NULL;
3660 }
3661
3662 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3663 longer needed after ipa-cp. */
3664
3665 void
3666 ipa_free_all_structures_after_ipa_cp (void)
3667 {
3668 if (!optimize && !in_lto_p)
3669 {
3670 ipa_free_all_edge_args ();
3671 ipa_free_all_node_params ();
3672 free_alloc_pool (ipcp_sources_pool);
3673 free_alloc_pool (ipcp_cst_values_pool);
3674 free_alloc_pool (ipcp_poly_ctx_values_pool);
3675 free_alloc_pool (ipcp_agg_lattice_pool);
3676 ipa_unregister_cgraph_hooks ();
3677 ipa_refdesc_pool.release ();
3678 }
3679 }
3680
3681 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3682 longer needed after indirect inlining. */
3683
3684 void
3685 ipa_free_all_structures_after_iinln (void)
3686 {
3687 ipa_free_all_edge_args ();
3688 ipa_free_all_node_params ();
3689 ipa_unregister_cgraph_hooks ();
3690 if (ipcp_sources_pool)
3691 free_alloc_pool (ipcp_sources_pool);
3692 if (ipcp_cst_values_pool)
3693 free_alloc_pool (ipcp_cst_values_pool);
3694 if (ipcp_poly_ctx_values_pool)
3695 free_alloc_pool (ipcp_poly_ctx_values_pool);
3696 if (ipcp_agg_lattice_pool)
3697 free_alloc_pool (ipcp_agg_lattice_pool);
3698 ipa_refdesc_pool.release ();
3699 }
3700
3701 /* Print ipa_tree_map data structures of function NODE to F. */
3703
3704 void
3705 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3706 {
3707 int i, count;
3708 struct ipa_node_params *info;
3709
3710 if (!node->definition)
3711 return;
3712 info = IPA_NODE_REF (node);
3713 fprintf (f, " function %s/%i parameter descriptors:\n",
3714 node->name (), node->order);
3715 count = ipa_get_param_count (info);
3716 for (i = 0; i < count; i++)
3717 {
3718 int c;
3719
3720 fprintf (f, " ");
3721 ipa_dump_param (f, info, i);
3722 if (ipa_is_param_used (info, i))
3723 fprintf (f, " used");
3724 c = ipa_get_controlled_uses (info, i);
3725 if (c == IPA_UNDESCRIBED_USE)
3726 fprintf (f, " undescribed_use");
3727 else
3728 fprintf (f, " controlled_uses=%i", c);
3729 fprintf (f, "\n");
3730 }
3731 }
3732
3733 /* Print ipa_tree_map data structures of all functions in the
3734 callgraph to F. */
3735
3736 void
3737 ipa_print_all_params (FILE * f)
3738 {
3739 struct cgraph_node *node;
3740
3741 fprintf (f, "\nFunction parameters:\n");
3742 FOR_EACH_FUNCTION (node)
3743 ipa_print_node_params (f, node);
3744 }
3745
3746 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3747
3748 vec<tree>
3749 ipa_get_vector_of_formal_parms (tree fndecl)
3750 {
3751 vec<tree> args;
3752 int count;
3753 tree parm;
3754
3755 gcc_assert (!flag_wpa);
3756 count = count_formal_params (fndecl);
3757 args.create (count);
3758 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3759 args.quick_push (parm);
3760
3761 return args;
3762 }
3763
3764 /* Return a heap-allocated vector containing the types of the formal
3765 parameters of function type FNTYPE. */
3766
3767 vec<tree>
3768 ipa_get_vector_of_formal_parm_types (tree fntype)
3769 {
3770 vec<tree> types;
3771 int count = 0;
3772 tree t;
3773
3774 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3775 count++;
3776
3777 types.create (count);
3778 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3779 types.quick_push (TREE_VALUE (t));
3780
3781 return types;
3782 }
3783
3784 /* Modify the function declaration FNDECL and its type according to the plan in
3785 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3786 to point to the actual parameters being modified, which are determined by
3787 their base_index fields. */
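/* For illustration, a hypothetical plan: given

   int foo (struct S *s, int unused);

   a two-entry ADJUSTMENTS vector whose first entry describes a reduction
   of parameter 0 to an int piece passed by value, and whose second entry
   has op IPA_PARM_OP_REMOVE with base_index 1, would rewrite the
   declaration to something like

   int foo (int SYNTH.5);

   where the new PARM_DECL gets a name derived from adj->arg_prefix
   ("SYNTH" when no prefix is set). */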
3788
3789 void
3790 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3791 {
3792 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3793 tree orig_type = TREE_TYPE (fndecl);
3794 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3795
3796 /* The following test is an ugly hack; some functions simply don't have any
3797 arguments in their type. This is probably a bug but well... */
3798 bool care_for_types = (old_arg_types != NULL_TREE);
3799 bool last_parm_void;
3800 vec<tree> otypes;
3801 if (care_for_types)
3802 {
3803 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3804 == void_type_node);
3805 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3806 if (last_parm_void)
3807 gcc_assert (oparms.length () + 1 == otypes.length ());
3808 else
3809 gcc_assert (oparms.length () == otypes.length ());
3810 }
3811 else
3812 {
3813 last_parm_void = false;
3814 otypes.create (0);
3815 }
3816
3817 int len = adjustments.length ();
3818 tree *link = &DECL_ARGUMENTS (fndecl);
3819 tree new_arg_types = NULL;
3820 for (int i = 0; i < len; i++)
3821 {
3822 struct ipa_parm_adjustment *adj;
3823 gcc_assert (link);
3824
3825 adj = &adjustments[i];
3826 tree parm;
3827 if (adj->op == IPA_PARM_OP_NEW)
3828 parm = NULL;
3829 else
3830 parm = oparms[adj->base_index];
3831 adj->base = parm;
3832
3833 if (adj->op == IPA_PARM_OP_COPY)
3834 {
3835 if (care_for_types)
3836 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3837 new_arg_types);
3838 *link = parm;
3839 link = &DECL_CHAIN (parm);
3840 }
3841 else if (adj->op != IPA_PARM_OP_REMOVE)
3842 {
3843 tree new_parm;
3844 tree ptype;
3845
3846 if (adj->by_ref)
3847 ptype = build_pointer_type (adj->type);
3848 else
3849 {
3850 ptype = adj->type;
3851 if (is_gimple_reg_type (ptype))
3852 {
3853 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3854 if (TYPE_ALIGN (ptype) < malign)
3855 ptype = build_aligned_type (ptype, malign);
3856 }
3857 }
3858
3859 if (care_for_types)
3860 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3861
3862 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3863 ptype);
3864 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3865 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3866 DECL_ARTIFICIAL (new_parm) = 1;
3867 DECL_ARG_TYPE (new_parm) = ptype;
3868 DECL_CONTEXT (new_parm) = fndecl;
3869 TREE_USED (new_parm) = 1;
3870 DECL_IGNORED_P (new_parm) = 1;
3871 layout_decl (new_parm, 0);
3872
3873 if (adj->op == IPA_PARM_OP_NEW)
3874 adj->base = NULL;
3875 else
3876 adj->base = parm;
3877 adj->new_decl = new_parm;
3878
3879 *link = new_parm;
3880 link = &DECL_CHAIN (new_parm);
3881 }
3882 }
3883
3884 *link = NULL_TREE;
3885
3886 tree new_reversed = NULL;
3887 if (care_for_types)
3888 {
3889 new_reversed = nreverse (new_arg_types);
3890 if (last_parm_void)
3891 {
3892 if (new_reversed)
3893 TREE_CHAIN (new_arg_types) = void_list_node;
3894 else
3895 new_reversed = void_list_node;
3896 }
3897 }
3898
3899 /* Use copy_node to preserve as much as possible from the original type
3900 (debug info, attribute lists etc.).
3901 The exception is that METHOD_TYPEs must have a THIS argument; when we
3902 are asked to remove it, we need to build a new FUNCTION_TYPE
3903 instead. */
3904 tree new_type = NULL;
3905 if (TREE_CODE (orig_type) != METHOD_TYPE
3906 || (adjustments[0].op == IPA_PARM_OP_COPY
3907 && adjustments[0].base_index == 0))
3908 {
3909 new_type = build_distinct_type_copy (orig_type);
3910 TYPE_ARG_TYPES (new_type) = new_reversed;
3911 }
3912 else
3913 {
3914 new_type
3915 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3916 new_reversed));
3917 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3918 DECL_VINDEX (fndecl) = NULL_TREE;
3919 }
3920
3921 /* When signature changes, we need to clear builtin info. */
3922 if (DECL_BUILT_IN (fndecl))
3923 {
3924 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3925 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3926 }
3927
3928 TREE_TYPE (fndecl) = new_type;
3929 DECL_VIRTUAL_P (fndecl) = 0;
3930 DECL_LANG_SPECIFIC (fndecl) = NULL;
3931 otypes.release ();
3932 oparms.release ();
3933 }
3934
3935 /* Modify actual arguments of the function call STMT as indicated in
3936 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
3937 otherwise it must contain the corresponding call graph edge. */
3938
3939 void
3940 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3941 ipa_parm_adjustment_vec adjustments)
3942 {
3943 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3944 vec<tree> vargs;
3945 vec<tree, va_gc> **debug_args = NULL;
3946 gcall *new_stmt;
3947 gimple_stmt_iterator gsi, prev_gsi;
3948 tree callee_decl;
3949 int i, len;
3950
3951 len = adjustments.length ();
3952 vargs.create (len);
3953 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3954 current_node->remove_stmt_references (stmt);
3955
3956 gsi = gsi_for_stmt (stmt);
3957 prev_gsi = gsi;
3958 gsi_prev (&prev_gsi);
3959 for (i = 0; i < len; i++)
3960 {
3961 struct ipa_parm_adjustment *adj;
3962
3963 adj = &adjustments[i];
3964
3965 if (adj->op == IPA_PARM_OP_COPY)
3966 {
3967 tree arg = gimple_call_arg (stmt, adj->base_index);
3968
3969 vargs.quick_push (arg);
3970 }
3971 else if (adj->op != IPA_PARM_OP_REMOVE)
3972 {
3973 tree expr, base, off;
3974 location_t loc;
3975 unsigned int deref_align = 0;
3976 bool deref_base = false;
3977
3978 /* We create a new parameter out of the value of the old one; we can
3979 do the following kinds of transformations:
3980
3981 - A scalar passed by reference is converted to a scalar passed by
3982 value. (adj->by_ref is false and the type of the original
3983 actual argument is a pointer to a scalar).
3984
3985 - A part of an aggregate is passed instead of the whole aggregate.
3986 The part can be passed either by value or by reference, this is
3987 determined by value of adj->by_ref. Moreover, the code below
3988 handles both situations when the original aggregate is passed by
3989 value (its type is not a pointer) and when it is passed by
3990 reference (it is a pointer to an aggregate).
3991
3992 When the new argument is passed by reference (adj->by_ref is true)
3993 it must be a part of an aggregate and therefore we form it by
3994 simply taking the address of a reference inside the original
3995 aggregate. */
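/* As a hypothetical illustration: for an original call foo (&s), where
   the new argument is the by-value scalar at byte offset 4 of s, the
   code below rebuilds the actual argument as MEM_REF <&s, 4>, possibly
   loading it into a temporary first so that it is a valid gimple call
   argument. */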
3996
3997 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3998 base = gimple_call_arg (stmt, adj->base_index);
3999 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4000 : EXPR_LOCATION (base);
4001
4002 if (TREE_CODE (base) != ADDR_EXPR
4003 && POINTER_TYPE_P (TREE_TYPE (base)))
4004 off = build_int_cst (adj->alias_ptr_type,
4005 adj->offset / BITS_PER_UNIT);
4006 else
4007 {
4008 HOST_WIDE_INT base_offset;
4009 tree prev_base;
4010 bool addrof;
4011
4012 if (TREE_CODE (base) == ADDR_EXPR)
4013 {
4014 base = TREE_OPERAND (base, 0);
4015 addrof = true;
4016 }
4017 else
4018 addrof = false;
4019 prev_base = base;
4020 base = get_addr_base_and_unit_offset (base, &base_offset);
4021 /* Aggregate arguments can have non-invariant addresses. */
4022 if (!base)
4023 {
4024 base = build_fold_addr_expr (prev_base);
4025 off = build_int_cst (adj->alias_ptr_type,
4026 adj->offset / BITS_PER_UNIT);
4027 }
4028 else if (TREE_CODE (base) == MEM_REF)
4029 {
4030 if (!addrof)
4031 {
4032 deref_base = true;
4033 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4034 }
4035 off = build_int_cst (adj->alias_ptr_type,
4036 base_offset
4037 + adj->offset / BITS_PER_UNIT);
4038 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4039 off);
4040 base = TREE_OPERAND (base, 0);
4041 }
4042 else
4043 {
4044 off = build_int_cst (adj->alias_ptr_type,
4045 base_offset
4046 + adj->offset / BITS_PER_UNIT);
4047 base = build_fold_addr_expr (base);
4048 }
4049 }
4050
4051 if (!adj->by_ref)
4052 {
4053 tree type = adj->type;
4054 unsigned int align;
4055 unsigned HOST_WIDE_INT misalign;
4056
4057 if (deref_base)
4058 {
4059 align = deref_align;
4060 misalign = 0;
4061 }
4062 else
4063 {
4064 get_pointer_alignment_1 (base, &align, &misalign);
4065 if (TYPE_ALIGN (type) > align)
4066 align = TYPE_ALIGN (type);
4067 }
4068 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4069 * BITS_PER_UNIT);
4070 misalign = misalign & (align - 1);
4071 if (misalign != 0)
4072 align = (misalign & -misalign);
4073 if (align < TYPE_ALIGN (type))
4074 type = build_aligned_type (type, align);
4075 base = force_gimple_operand_gsi (&gsi, base,
4076 true, NULL, true, GSI_SAME_STMT);
4077 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4078 /* If expr is not a valid gimple call argument emit
4079 a load into a temporary. */
4080 if (is_gimple_reg_type (TREE_TYPE (expr)))
4081 {
4082 gimple tem = gimple_build_assign (NULL_TREE, expr);
4083 if (gimple_in_ssa_p (cfun))
4084 {
4085 gimple_set_vuse (tem, gimple_vuse (stmt));
4086 expr = make_ssa_name (TREE_TYPE (expr), tem);
4087 }
4088 else
4089 expr = create_tmp_reg (TREE_TYPE (expr));
4090 gimple_assign_set_lhs (tem, expr);
4091 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4092 }
4093 }
4094 else
4095 {
4096 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4097 expr = build_fold_addr_expr (expr);
4098 expr = force_gimple_operand_gsi (&gsi, expr,
4099 true, NULL, true, GSI_SAME_STMT);
4100 }
4101 vargs.quick_push (expr);
4102 }
4103 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4104 {
4105 unsigned int ix;
4106 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4107 gimple def_temp;
4108
4109 arg = gimple_call_arg (stmt, adj->base_index);
4110 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4111 {
4112 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4113 continue;
4114 arg = fold_convert_loc (gimple_location (stmt),
4115 TREE_TYPE (origin), arg);
4116 }
4117 if (debug_args == NULL)
4118 debug_args = decl_debug_args_insert (callee_decl);
4119 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4120 if (ddecl == origin)
4121 {
4122 ddecl = (**debug_args)[ix + 1];
4123 break;
4124 }
4125 if (ddecl == NULL)
4126 {
4127 ddecl = make_node (DEBUG_EXPR_DECL);
4128 DECL_ARTIFICIAL (ddecl) = 1;
4129 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4130 DECL_MODE (ddecl) = DECL_MODE (origin);
4131
4132 vec_safe_push (*debug_args, origin);
4133 vec_safe_push (*debug_args, ddecl);
4134 }
4135 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4136 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4137 }
4138 }
4139
4140 if (dump_file && (dump_flags & TDF_DETAILS))
4141 {
4142 fprintf (dump_file, "replacing stmt:");
4143 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4144 }
4145
4146 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4147 vargs.release ();
4148 if (gimple_call_lhs (stmt))
4149 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4150
4151 gimple_set_block (new_stmt, gimple_block (stmt));
4152 if (gimple_has_location (stmt))
4153 gimple_set_location (new_stmt, gimple_location (stmt));
4154 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4155 gimple_call_copy_flags (new_stmt, stmt);
4156 if (gimple_in_ssa_p (cfun))
4157 {
4158 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4159 if (gimple_vdef (stmt))
4160 {
4161 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4162 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4163 }
4164 }
4165
4166 if (dump_file && (dump_flags & TDF_DETAILS))
4167 {
4168 fprintf (dump_file, "with stmt:");
4169 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4170 fprintf (dump_file, "\n");
4171 }
4172 gsi_replace (&gsi, new_stmt, true);
4173 if (cs)
4174 cs->set_call_stmt (new_stmt);
4175 do
4176 {
4177 current_node->record_stmt_references (gsi_stmt (gsi));
4178 gsi_prev (&gsi);
4179 }
4180 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4181 }
4182
4183 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4184 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4185 specifies whether the function should care about type incompatibility between
4186 the current and new expressions. If it is false, the function will leave
4187 incompatibility issues to the caller. Return true iff the expression
4188 was modified. */
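/* For instance (hypothetical names), if parameter p was reduced to a new
   scalar parameter p$f, an occurrence of the reduced piece in the body is
   rewritten to p$f directly, or to *p$f when the piece is passed by
   reference, wrapped in a VIEW_CONVERT_EXPR when CONVERT is set and the
   types are not usefully convertible. */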
4189
4190 bool
4191 ipa_modify_expr (tree *expr, bool convert,
4192 ipa_parm_adjustment_vec adjustments)
4193 {
4194 struct ipa_parm_adjustment *cand
4195 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4196 if (!cand)
4197 return false;
4198
4199 tree src;
4200 if (cand->by_ref)
4201 src = build_simple_mem_ref (cand->new_decl);
4202 else
4203 src = cand->new_decl;
4204
4205 if (dump_file && (dump_flags & TDF_DETAILS))
4206 {
4207 fprintf (dump_file, "About to replace expr ");
4208 print_generic_expr (dump_file, *expr, 0);
4209 fprintf (dump_file, " with ");
4210 print_generic_expr (dump_file, src, 0);
4211 fprintf (dump_file, "\n");
4212 }
4213
4214 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4215 {
4216 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4217 *expr = vce;
4218 }
4219 else
4220 *expr = src;
4221 return true;
4222 }
4223
4224 /* If T is an SSA_NAME, return NULL if it is not a default def, or
4225 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4226 the base variable is always returned, regardless of whether it is a
4227 default def. Return T if it is not an SSA_NAME. */
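/* For example, for the default definition a_1(D) of parameter a, the
   function returns the PARM_DECL a, while for a_2 defined by an ordinary
   assignment it returns NULL_TREE unless IGNORE_DEFAULT_DEF is true. */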
4228
4229 static tree
4230 get_ssa_base_param (tree t, bool ignore_default_def)
4231 {
4232 if (TREE_CODE (t) == SSA_NAME)
4233 {
4234 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4235 return SSA_NAME_VAR (t);
4236 else
4237 return NULL_TREE;
4238 }
4239 return t;
4240 }
4241
4242 /* Given an expression, return an adjustment entry specifying the
4243 transformation to be done on EXPR. If no suitable adjustment entry
4244 is found, return NULL.
4245
4246 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4247 default def, otherwise bail on them.
4248
4249 If CONVERT is non-NULL, this function will set *CONVERT if the
4250 expression provided is a component reference. ADJUSTMENTS is the
4251 adjustments vector. */
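/* For illustration: called on REALPART_EXPR <*p_1(D)>, the function first
   descends to the operand (setting *CONVERT), computes the base and bit
   offset of the reference, strips the MEM_REF down to the SSA name and
   then to the PARM_DECL p, and finally looks for an ADJUSTMENTS entry
   with that base and a matching offset. */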
4252
4253 ipa_parm_adjustment *
4254 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4255 ipa_parm_adjustment_vec adjustments,
4256 bool ignore_default_def)
4257 {
4258 if (TREE_CODE (**expr) == BIT_FIELD_REF
4259 || TREE_CODE (**expr) == IMAGPART_EXPR
4260 || TREE_CODE (**expr) == REALPART_EXPR)
4261 {
4262 *expr = &TREE_OPERAND (**expr, 0);
4263 if (convert)
4264 *convert = true;
4265 }
4266
4267 HOST_WIDE_INT offset, size, max_size;
4268 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4269 if (!base || size == -1 || max_size == -1)
4270 return NULL;
4271
4272 if (TREE_CODE (base) == MEM_REF)
4273 {
4274 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4275 base = TREE_OPERAND (base, 0);
4276 }
4277
4278 base = get_ssa_base_param (base, ignore_default_def);
4279 if (!base || TREE_CODE (base) != PARM_DECL)
4280 return NULL;
4281
4282 struct ipa_parm_adjustment *cand = NULL;
4283 unsigned int len = adjustments.length ();
4284 for (unsigned i = 0; i < len; i++)
4285 {
4286 struct ipa_parm_adjustment *adj = &adjustments[i];
4287
4288 if (adj->base == base
4289 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4290 {
4291 cand = adj;
4292 break;
4293 }
4294 }
4295
4296 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4297 return NULL;
4298 return cand;
4299 }
4300
4301 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4302
4303 static bool
4304 index_in_adjustments_multiple_times_p (int base_index,
4305 ipa_parm_adjustment_vec adjustments)
4306 {
4307 int i, len = adjustments.length ();
4308 bool one = false;
4309
4310 for (i = 0; i < len; i++)
4311 {
4312 struct ipa_parm_adjustment *adj;
4313 adj = &adjustments[i];
4314
4315 if (adj->base_index == base_index)
4316 {
4317 if (one)
4318 return true;
4319 else
4320 one = true;
4321 }
4322 }
4323 return false;
4324 }
4325
4326
4327 /* Return adjustments that should have the same effect on function parameters
4328 and call arguments as if they were first changed according to adjustments in
4329 INNER and then by adjustments in OUTER. */
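/* A hypothetical worked example: if INNER copies parameter 0 and removes
   parameter 1, the temporary vector TMP built below holds only the copy,
   at index 0. An OUTER entry with base_index 0 is then composed with that
   TMP entry: a copy composed with a copy stays a copy, the offsets of two
   reductions add up, and the removal of parameter 1 is re-emitted at the
   end of the combined vector. */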
4330
4331 ipa_parm_adjustment_vec
4332 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4333 ipa_parm_adjustment_vec outer)
4334 {
4335 int i, outlen = outer.length ();
4336 int inlen = inner.length ();
4337 int removals = 0;
4338 ipa_parm_adjustment_vec adjustments, tmp;
4339
4340 tmp.create (inlen);
4341 for (i = 0; i < inlen; i++)
4342 {
4343 struct ipa_parm_adjustment *n;
4344 n = &inner[i];
4345
4346 if (n->op == IPA_PARM_OP_REMOVE)
4347 removals++;
4348 else
4349 {
4350 /* FIXME: Handling of new arguments is not implemented yet. */
4351 gcc_assert (n->op != IPA_PARM_OP_NEW);
4352 tmp.quick_push (*n);
4353 }
4354 }
4355
4356 adjustments.create (outlen + removals);
4357 for (i = 0; i < outlen; i++)
4358 {
4359 struct ipa_parm_adjustment r;
4360 struct ipa_parm_adjustment *out = &outer[i];
4361 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4362
4363 memset (&r, 0, sizeof (r));
4364 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4365 if (out->op == IPA_PARM_OP_REMOVE)
4366 {
4367 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4368 {
4369 r.op = IPA_PARM_OP_REMOVE;
4370 adjustments.quick_push (r);
4371 }
4372 continue;
4373 }
4374 else
4375 {
4376 /* FIXME: Handling of new arguments is not implemented yet. */
4377 gcc_assert (out->op != IPA_PARM_OP_NEW);
4378 }
4379
4380 r.base_index = in->base_index;
4381 r.type = out->type;
4382
4383 /* FIXME: Create nonlocal value too. */
4384
4385 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4386 r.op = IPA_PARM_OP_COPY;
4387 else if (in->op == IPA_PARM_OP_COPY)
4388 r.offset = out->offset;
4389 else if (out->op == IPA_PARM_OP_COPY)
4390 r.offset = in->offset;
4391 else
4392 r.offset = in->offset + out->offset;
4393 adjustments.quick_push (r);
4394 }
4395
4396 for (i = 0; i < inlen; i++)
4397 {
4398 struct ipa_parm_adjustment *n = &inner[i];
4399
4400 if (n->op == IPA_PARM_OP_REMOVE)
4401 adjustments.quick_push (*n);
4402 }
4403
4404 tmp.release ();
4405 return adjustments;
4406 }
4407
4408 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4409 human-friendly way, assuming they are meant to be applied to FNDECL. */
4410
4411 void
4412 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4413 tree fndecl)
4414 {
4415 int i, len = adjustments.length ();
4416 bool first = true;
4417 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4418
4419 fprintf (file, "IPA param adjustments: ");
4420 for (i = 0; i < len; i++)
4421 {
4422 struct ipa_parm_adjustment *adj;
4423 adj = &adjustments[i];
4424
4425 if (!first)
4426 fprintf (file, " ");
4427 else
4428 first = false;
4429
4430 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4431 print_generic_expr (file, parms[adj->base_index], 0);
4432 if (adj->base)
4433 {
4434 fprintf (file, ", base: ");
4435 print_generic_expr (file, adj->base, 0);
4436 }
4437 if (adj->new_decl)
4438 {
4439 fprintf (file, ", new_decl: ");
4440 print_generic_expr (file, adj->new_decl, 0);
4441 }
4442 if (adj->new_ssa_base)
4443 {
4444 fprintf (file, ", new_ssa_base: ");
4445 print_generic_expr (file, adj->new_ssa_base, 0);
4446 }
4447
4448 if (adj->op == IPA_PARM_OP_COPY)
4449 fprintf (file, ", copy_param");
4450 else if (adj->op == IPA_PARM_OP_REMOVE)
4451 fprintf (file, ", remove_param");
4452 else
4453 fprintf (file, ", offset %li", (long) adj->offset);
4454 if (adj->by_ref)
4455 fprintf (file, ", by_ref");
4456 print_node_brief (file, ", type: ", adj->type, 0);
4457 fprintf (file, "\n");
4458 }
4459 parms.release ();
4460 }
4461
4462 /* Dump the linked list of aggregate replacement values AV to F. */
4463
4464 void
4465 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4466 {
4467 bool comma = false;
4468 fprintf (f, " Aggregate replacements:");
4469 for (; av; av = av->next)
4470 {
4471 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4472 av->index, av->offset);
4473 print_generic_expr (f, av->value, 0);
4474 comma = true;
4475 }
4476 fprintf (f, "\n");
4477 }
4478
4479 /* Stream out jump function JUMP_FUNC to OB. */
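/* The layout written below, which ipa_read_jump_function must mirror
   exactly: the jump function type as a uhwi, a type-specific payload
   (for instance, for a NOP_EXPR pass-through, the operation, the
   formal_id and a one-bit agg_preserved bitpack), the number of
   aggregate items, a one-bit by_ref flag if that number is nonzero, the
   items themselves, and finally the known-alignment bit followed by the
   alignment data. */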
4480
4481 static void
4482 ipa_write_jump_function (struct output_block *ob,
4483 struct ipa_jump_func *jump_func)
4484 {
4485 struct ipa_agg_jf_item *item;
4486 struct bitpack_d bp;
4487 int i, count;
4488
4489 streamer_write_uhwi (ob, jump_func->type);
4490 switch (jump_func->type)
4491 {
4492 case IPA_JF_UNKNOWN:
4493 break;
4494 case IPA_JF_CONST:
4495 gcc_assert (
4496 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4497 stream_write_tree (ob, jump_func->value.constant.value, true);
4498 break;
4499 case IPA_JF_PASS_THROUGH:
4500 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4501 if (jump_func->value.pass_through.operation == NOP_EXPR)
4502 {
4503 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4504 bp = bitpack_create (ob->main_stream);
4505 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4506 streamer_write_bitpack (&bp);
4507 }
4508 else
4509 {
4510 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4511 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4512 }
4513 break;
4514 case IPA_JF_ANCESTOR:
4515 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4516 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4517 bp = bitpack_create (ob->main_stream);
4518 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4519 streamer_write_bitpack (&bp);
4520 break;
4521 }
4522
4523 count = vec_safe_length (jump_func->agg.items);
4524 streamer_write_uhwi (ob, count);
4525 if (count)
4526 {
4527 bp = bitpack_create (ob->main_stream);
4528 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4529 streamer_write_bitpack (&bp);
4530 }
4531
4532 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4533 {
4534 streamer_write_uhwi (ob, item->offset);
4535 stream_write_tree (ob, item->value, true);
4536 }
4537
4538 bp = bitpack_create (ob->main_stream);
4539 bp_pack_value (&bp, jump_func->alignment.known, 1);
4540 streamer_write_bitpack (&bp);
4541 if (jump_func->alignment.known)
4542 {
4543 streamer_write_uhwi (ob, jump_func->alignment.align);
4544 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4545 }
4546 }
4547
4548 /* Read in jump function JUMP_FUNC from IB. */
4549
4550 static void
4551 ipa_read_jump_function (struct lto_input_block *ib,
4552 struct ipa_jump_func *jump_func,
4553 struct cgraph_edge *cs,
4554 struct data_in *data_in)
4555 {
4556 enum jump_func_type jftype;
4557 enum tree_code operation;
4558 int i, count;
4559
4560 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4561 switch (jftype)
4562 {
4563 case IPA_JF_UNKNOWN:
4564 ipa_set_jf_unknown (jump_func);
4565 break;
4566 case IPA_JF_CONST:
4567 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4568 break;
4569 case IPA_JF_PASS_THROUGH:
4570 operation = (enum tree_code) streamer_read_uhwi (ib);
4571 if (operation == NOP_EXPR)
4572 {
4573 int formal_id = streamer_read_uhwi (ib);
4574 struct bitpack_d bp = streamer_read_bitpack (ib);
4575 bool agg_preserved = bp_unpack_value (&bp, 1);
4576 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4577 }
4578 else
4579 {
4580 tree operand = stream_read_tree (ib, data_in);
4581 int formal_id = streamer_read_uhwi (ib);
4582 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4583 operation);
4584 }
4585 break;
4586 case IPA_JF_ANCESTOR:
4587 {
4588 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4589 int formal_id = streamer_read_uhwi (ib);
4590 struct bitpack_d bp = streamer_read_bitpack (ib);
4591 bool agg_preserved = bp_unpack_value (&bp, 1);
4592 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4593 break;
4594 }
4595 }
4596
4597 count = streamer_read_uhwi (ib);
4598 vec_alloc (jump_func->agg.items, count);
4599 if (count)
4600 {
4601 struct bitpack_d bp = streamer_read_bitpack (ib);
4602 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4603 }
4604 for (i = 0; i < count; i++)
4605 {
4606 struct ipa_agg_jf_item item;
4607 item.offset = streamer_read_uhwi (ib);
4608 item.value = stream_read_tree (ib, data_in);
4609 jump_func->agg.items->quick_push (item);
4610 }
4611
4612 struct bitpack_d bp = streamer_read_bitpack (ib);
4613 bool alignment_known = bp_unpack_value (&bp, 1);
4614 if (alignment_known)
4615 {
4616 jump_func->alignment.known = true;
4617 jump_func->alignment.align = streamer_read_uhwi (ib);
4618 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4619 }
4620 else
4621 jump_func->alignment.known = false;
4622 }
4623
4624 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4625 relevant to indirect inlining to OB. */
4626
4627 static void
4628 ipa_write_indirect_edge_info (struct output_block *ob,
4629 struct cgraph_edge *cs)
4630 {
4631 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4632 struct bitpack_d bp;
4633
4634 streamer_write_hwi (ob, ii->param_index);
4635 bp = bitpack_create (ob->main_stream);
4636 bp_pack_value (&bp, ii->polymorphic, 1);
4637 bp_pack_value (&bp, ii->agg_contents, 1);
4638 bp_pack_value (&bp, ii->member_ptr, 1);
4639 bp_pack_value (&bp, ii->by_ref, 1);
4640 bp_pack_value (&bp, ii->vptr_changed, 1);
4641 streamer_write_bitpack (&bp);
4642 if (ii->agg_contents || ii->polymorphic)
4643 streamer_write_hwi (ob, ii->offset);
4644 else
4645 gcc_assert (ii->offset == 0);
4646
4647 if (ii->polymorphic)
4648 {
4649 streamer_write_hwi (ob, ii->otr_token);
4650 stream_write_tree (ob, ii->otr_type, true);
4651 ii->context.stream_out (ob);
4652 }
4653 }
4654
4655 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4656 relevant to indirect inlining from IB. */
4657
4658 static void
4659 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4660 struct data_in *data_in,
4661 struct cgraph_edge *cs)
4662 {
4663 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4664 struct bitpack_d bp;
4665
4666 ii->param_index = (int) streamer_read_hwi (ib);
4667 bp = streamer_read_bitpack (ib);
4668 ii->polymorphic = bp_unpack_value (&bp, 1);
4669 ii->agg_contents = bp_unpack_value (&bp, 1);
4670 ii->member_ptr = bp_unpack_value (&bp, 1);
4671 ii->by_ref = bp_unpack_value (&bp, 1);
4672 ii->vptr_changed = bp_unpack_value (&bp, 1);
4673 if (ii->agg_contents || ii->polymorphic)
4674 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4675 else
4676 ii->offset = 0;
4677 if (ii->polymorphic)
4678 {
4679 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4680 ii->otr_type = stream_read_tree (ib, data_in);
4681 ii->context.stream_in (ib, data_in);
4682 }
4683 }
4684
4685 /* Stream out NODE info to OB. */
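/* Note the encoding of the per-edge argument count below: it is written
   as twice the argument count, plus one if polymorphic call contexts are
   present, so that the reader can recover both values from a single uhwi
   (count & 1 and count / 2 in ipa_read_node_info). */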
4686
4687 static void
4688 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4689 {
4690 int node_ref;
4691 lto_symtab_encoder_t encoder;
4692 struct ipa_node_params *info = IPA_NODE_REF (node);
4693 int j;
4694 struct cgraph_edge *e;
4695 struct bitpack_d bp;
4696
4697 encoder = ob->decl_state->symtab_node_encoder;
4698 node_ref = lto_symtab_encoder_encode (encoder, node);
4699 streamer_write_uhwi (ob, node_ref);
4700
4701 streamer_write_uhwi (ob, ipa_get_param_count (info));
4702 for (j = 0; j < ipa_get_param_count (info); j++)
4703 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4704 bp = bitpack_create (ob->main_stream);
4705 gcc_assert (info->analysis_done
4706 || ipa_get_param_count (info) == 0);
4707 gcc_assert (!info->node_enqueued);
4708 gcc_assert (!info->ipcp_orig_node);
4709 for (j = 0; j < ipa_get_param_count (info); j++)
4710 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4711 streamer_write_bitpack (&bp);
4712 for (j = 0; j < ipa_get_param_count (info); j++)
4713 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4714 for (e = node->callees; e; e = e->next_callee)
4715 {
4716 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4717
4718 streamer_write_uhwi (ob,
4719 ipa_get_cs_argument_count (args) * 2
4720 + (args->polymorphic_call_contexts != NULL));
4721 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4722 {
4723 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4724 if (args->polymorphic_call_contexts != NULL)
4725 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4726 }
4727 }
4728 for (e = node->indirect_calls; e; e = e->next_callee)
4729 {
4730 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4731
4732 streamer_write_uhwi (ob,
4733 ipa_get_cs_argument_count (args) * 2
4734 + (args->polymorphic_call_contexts != NULL));
4735 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4736 {
4737 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4738 if (args->polymorphic_call_contexts != NULL)
4739 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4740 }
4741 ipa_write_indirect_edge_info (ob, e);
4742 }
4743 }
4744
4745 /* Stream in NODE info from IB. */
4746
4747 static void
4748 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4749 struct data_in *data_in)
4750 {
4751 struct ipa_node_params *info = IPA_NODE_REF (node);
4752 int k;
4753 struct cgraph_edge *e;
4754 struct bitpack_d bp;
4755
4756 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4757
4758 for (k = 0; k < ipa_get_param_count (info); k++)
4759 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4760
4761 bp = streamer_read_bitpack (ib);
4762 if (ipa_get_param_count (info) != 0)
4763 info->analysis_done = true;
4764 info->node_enqueued = false;
4765 for (k = 0; k < ipa_get_param_count (info); k++)
4766 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4767 for (k = 0; k < ipa_get_param_count (info); k++)
4768 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4769 for (e = node->callees; e; e = e->next_callee)
4770 {
4771 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4772 int count = streamer_read_uhwi (ib);
4773 bool contexts_computed = count & 1;
4774 count /= 2;
4775
4776 if (!count)
4777 continue;
4778 vec_safe_grow_cleared (args->jump_functions, count);
4779 if (contexts_computed)
4780 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4781
4782 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4783 {
4784 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4785 data_in);
4786 if (contexts_computed)
4787 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4788 }
4789 }
4790 for (e = node->indirect_calls; e; e = e->next_callee)
4791 {
4792 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4793 int count = streamer_read_uhwi (ib);
4794 bool contexts_computed = count & 1;
4795 count /= 2;
4796
4797 if (count)
4798 {
4799 vec_safe_grow_cleared (args->jump_functions, count);
4800 if (contexts_computed)
4801 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4802 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4803 {
4804 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4805 data_in);
4806 if (contexts_computed)
4807 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4808 }
4809 }
4810 ipa_read_indirect_edge_info (ib, data_in, e);
4811 }
4812 }
4813
4814 /* Write jump functions of all functions in the current partition. */
4815
4816 void
4817 ipa_prop_write_jump_functions (void)
4818 {
4819 struct cgraph_node *node;
4820 struct output_block *ob;
4821 unsigned int count = 0;
4822 lto_symtab_encoder_iterator lsei;
4823 lto_symtab_encoder_t encoder;
4824
4825 if (!ipa_node_params_sum)
4826 return;
4827
4828 ob = create_output_block (LTO_section_jump_functions);
4829 encoder = ob->decl_state->symtab_node_encoder;
4830 ob->symbol = NULL;
4831 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4832 lsei_next_function_in_partition (&lsei))
4833 {
4834 node = lsei_cgraph_node (lsei);
4835 if (node->has_gimple_body_p ()
4836 && IPA_NODE_REF (node) != NULL)
4837 count++;
4838 }
4839
4840 streamer_write_uhwi (ob, count);
4841
4842 /* Process all of the functions. */
4843 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4844 lsei_next_function_in_partition (&lsei))
4845 {
4846 node = lsei_cgraph_node (lsei);
4847 if (node->has_gimple_body_p ()
4848 && IPA_NODE_REF (node) != NULL)
4849 ipa_write_node_info (ob, node);
4850 }
4851 streamer_write_char_stream (ob->main_stream, 0);
4852 produce_asm (ob, NULL);
4853 destroy_output_block (ob);
4854 }
4855
4856 /* Read a jump-functions section in file FILE_DATA of length LEN with data DATA. */
4857
4858 static void
4859 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4860 size_t len)
4861 {
4862 const struct lto_function_header *header =
4863 (const struct lto_function_header *) data;
4864 const int cfg_offset = sizeof (struct lto_function_header);
4865 const int main_offset = cfg_offset + header->cfg_size;
4866 const int string_offset = main_offset + header->main_size;
4867 struct data_in *data_in;
4868 unsigned int i;
4869 unsigned int count;
4870
4871 lto_input_block ib_main ((const char *) data + main_offset,
4872 header->main_size, file_data->mode_table);
4873
4874 data_in =
4875 lto_data_in_create (file_data, (const char *) data + string_offset,
4876 header->string_size, vNULL);
4877 count = streamer_read_uhwi (&ib_main);
4878
4879 for (i = 0; i < count; i++)
4880 {
4881 unsigned int index;
4882 struct cgraph_node *node;
4883 lto_symtab_encoder_t encoder;
4884
4885 index = streamer_read_uhwi (&ib_main);
4886 encoder = file_data->symtab_node_encoder;
4887 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4888 index));
4889 gcc_assert (node->definition);
4890 ipa_read_node_info (&ib_main, node, data_in);
4891 }
4892 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4893 len);
4894 lto_data_in_delete (data_in);
4895 }
4896
4897 /* Read ipcp jump functions. */
4898
4899 void
4900 ipa_prop_read_jump_functions (void)
4901 {
4902 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4903 struct lto_file_decl_data *file_data;
4904 unsigned int j = 0;
4905
4906 ipa_check_create_node_params ();
4907 ipa_check_create_edge_args ();
4908 ipa_register_cgraph_hooks ();
4909
4910 while ((file_data = file_data_vec[j++]))
4911 {
4912 size_t len;
4913 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4914
4915 if (data)
4916 ipa_prop_read_section (file_data, data, len);
4917 }
4918 }
4919
4920 /* After merging units, we can get mismatches in argument counts.
4921 Also, decl merging might have rendered parameter lists obsolete.
4922 Also compute called_with_variable_arg info. */
4923
4924 void
4925 ipa_update_after_lto_read (void)
4926 {
4927 ipa_check_create_node_params ();
4928 ipa_check_create_edge_args ();
4929 }
4930
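/* Stream out the ipcp transformation summary of NODE, i.e. its chain of
   aggregate replacement values and, when present, the known alignments of
   its parameters, to OB. */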
4931 void
4932 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4933 {
4934 int node_ref;
4935 unsigned int count = 0;
4936 lto_symtab_encoder_t encoder;
4937 struct ipa_agg_replacement_value *aggvals, *av;
4938
4939 aggvals = ipa_get_agg_replacements_for_node (node);
4940 encoder = ob->decl_state->symtab_node_encoder;
4941 node_ref = lto_symtab_encoder_encode (encoder, node);
4942 streamer_write_uhwi (ob, node_ref);
4943
4944 for (av = aggvals; av; av = av->next)
4945 count++;
4946 streamer_write_uhwi (ob, count);
4947
4948 for (av = aggvals; av; av = av->next)
4949 {
4950 struct bitpack_d bp;
4951
4952 streamer_write_uhwi (ob, av->offset);
4953 streamer_write_uhwi (ob, av->index);
4954 stream_write_tree (ob, av->value, true);
4955
4956 bp = bitpack_create (ob->main_stream);
4957 bp_pack_value (&bp, av->by_ref, 1);
4958 streamer_write_bitpack (&bp);
4959 }
4960
4961 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4962 if (ts && vec_safe_length (ts->alignments) > 0)
4963 {
4964 count = ts->alignments->length ();
4965
4966 streamer_write_uhwi (ob, count);
4967 for (unsigned i = 0; i < count; ++i)
4968 {
4969 ipa_alignment *parm_al = &(*ts->alignments)[i];
4970
4971 struct bitpack_d bp;
4972 bp = bitpack_create (ob->main_stream);
4973 bp_pack_value (&bp, parm_al->known, 1);
4974 streamer_write_bitpack (&bp);
4975 if (parm_al->known)
4976 {
4977 streamer_write_uhwi (ob, parm_al->align);
4978 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4979 parm_al->misalign);
4980 }
4981 }
4982 }
4983 else
4984 streamer_write_uhwi (ob, 0);
4985 }
4986
4987 /* Stream in the aggregate value replacement chain and parameter
alignments for NODE from IB. */
4988
4989 static void
4990 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4991 data_in *data_in)
4992 {
4993 struct ipa_agg_replacement_value *aggvals = NULL;
4994 unsigned int count, i;
4995
4996 count = streamer_read_uhwi (ib);
4997 for (i = 0; i < count; i++)
4998 {
4999 struct ipa_agg_replacement_value *av;
5000 struct bitpack_d bp;
5001
5002 av = ggc_alloc<ipa_agg_replacement_value> ();
5003 av->offset = streamer_read_uhwi (ib);
5004 av->index = streamer_read_uhwi (ib);
5005 av->value = stream_read_tree (ib, data_in);
5006 bp = streamer_read_bitpack (ib);
5007 av->by_ref = bp_unpack_value (&bp, 1);
5008 av->next = aggvals;
5009 aggvals = av;
5010 }
5011 ipa_set_node_agg_value_chain (node, aggvals);
5012
5013 count = streamer_read_uhwi (ib);
5014 if (count > 0)
5015 {
5016 ipcp_grow_transformations_if_necessary ();
5017
5018 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5019 vec_safe_grow_cleared (ts->alignments, count);
5020
5021 for (i = 0; i < count; i++)
5022 {
5023 ipa_alignment *parm_al;
5024 parm_al = &(*ts->alignments)[i];
5025 struct bitpack_d bp;
5026 bp = streamer_read_bitpack (ib);
5027 parm_al->known = bp_unpack_value (&bp, 1);
5028 if (parm_al->known)
5029 {
5030 parm_al->align = streamer_read_uhwi (ib);
5031 parm_al->misalign
5032 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5033 0, parm_al->align);
5034 }
5035 }
5036 }
5037 }
5038
5039 /* Write ipcp transformation summaries of all functions in the current partition. */
5040
5041 void
5042 ipcp_write_transformation_summaries (void)
5043 {
5044 struct cgraph_node *node;
5045 struct output_block *ob;
5046 unsigned int count = 0;
5047 lto_symtab_encoder_iterator lsei;
5048 lto_symtab_encoder_t encoder;
5049
5050 ob = create_output_block (LTO_section_ipcp_transform);
5051 encoder = ob->decl_state->symtab_node_encoder;
5052 ob->symbol = NULL;
5053 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5054 lsei_next_function_in_partition (&lsei))
5055 {
5056 node = lsei_cgraph_node (lsei);
5057 if (node->has_gimple_body_p ())
5058 count++;
5059 }
5060
5061 streamer_write_uhwi (ob, count);
5062
5063 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5064 lsei_next_function_in_partition (&lsei))
5065 {
5066 node = lsei_cgraph_node (lsei);
5067 if (node->has_gimple_body_p ())
5068 write_ipcp_transformation_info (ob, node);
5069 }
5070 streamer_write_char_stream (ob->main_stream, 0);
5071 produce_asm (ob, NULL);
5072 destroy_output_block (ob);
5073 }
5074
5075 /* Read replacements section in file FILE_DATA of length LEN with data
5076 DATA. */
5077
5078 static void
5079 read_replacements_section (struct lto_file_decl_data *file_data,
5080 const char *data,
5081 size_t len)
5082 {
5083 const struct lto_function_header *header =
5084 (const struct lto_function_header *) data;
5085 const int cfg_offset = sizeof (struct lto_function_header);
5086 const int main_offset = cfg_offset + header->cfg_size;
5087 const int string_offset = main_offset + header->main_size;
5088 struct data_in *data_in;
5089 unsigned int i;
5090 unsigned int count;
5091
5092 lto_input_block ib_main ((const char *) data + main_offset,
5093 header->main_size, file_data->mode_table);
5094
5095 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5096 header->string_size, vNULL);
5097 count = streamer_read_uhwi (&ib_main);
5098
5099 for (i = 0; i < count; i++)
5100 {
5101 unsigned int index;
5102 struct cgraph_node *node;
5103 lto_symtab_encoder_t encoder;
5104
5105 index = streamer_read_uhwi (&ib_main);
5106 encoder = file_data->symtab_node_encoder;
5107 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5108 index));
5109 gcc_assert (node->definition);
5110 read_ipcp_transformation_info (&ib_main, node, data_in);
5111 }
5112 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5113 len);
5114 lto_data_in_delete (data_in);
5115 }
5116
5117 /* Read IPA-CP transformation summaries. */
5118
5119 void
5120 ipcp_read_transformation_summaries (void)
5121 {
5122 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5123 struct lto_file_decl_data *file_data;
5124 unsigned int j = 0;
5125
5126 while ((file_data = file_data_vec[j++]))
5127 {
5128 size_t len;
5129 const char *data = lto_get_section_data (file_data,
5130 LTO_section_ipcp_transform,
5131 NULL, &len);
5132 if (data)
5133 read_replacements_section (file_data, data, len);
5134 }
5135 }
5136
5137 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5138 NODE. */
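/* For example, if NODE is a clone whose combined_args_to_skip bitmap
   contains bit 1, replacement values with indices 0, 2 and 3 are remapped
   to 0, 1 and 2 respectively, matching the clone's shortened parameter
   list. */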
5139
5140 static void
5141 adjust_agg_replacement_values (struct cgraph_node *node,
5142 struct ipa_agg_replacement_value *aggval)
5143 {
5144 struct ipa_agg_replacement_value *v;
5145 int i, c = 0, d = 0, *adj;
5146
5147 if (!node->clone.combined_args_to_skip)
5148 return;
5149
5150 for (v = aggval; v; v = v->next)
5151 {
5152 gcc_assert (v->index >= 0);
5153 if (c < v->index)
5154 c = v->index;
5155 }
5156 c++;
5157
5158 adj = XALLOCAVEC (int, c);
5159 for (i = 0; i < c; i++)
5160 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5161 {
5162 adj[i] = -1;
5163 d++;
5164 }
5165 else
5166 adj[i] = i - d;
5167
5168 for (v = aggval; v; v = v->next)
5169 v->index = adj[v->index];
5170 }
5171
5172 /* Dominator walker driving the ipcp modification phase. */
5173
5174 class ipcp_modif_dom_walker : public dom_walker
5175 {
5176 public:
5177 ipcp_modif_dom_walker (struct func_body_info *fbi,
5178 vec<ipa_param_descriptor> descs,
5179 struct ipa_agg_replacement_value *av,
5180 bool *sc, bool *cc)
5181 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5182 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5183
5184 virtual void before_dom_children (basic_block);
5185
5186 private:
5187 struct func_body_info *m_fbi;
5188 vec<ipa_param_descriptor> m_descriptors;
5189 struct ipa_agg_replacement_value *m_aggval;
5190 bool *m_something_changed, *m_cfg_changed;
5191 };
5192
5193 void
5194 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5195 {
5196 gimple_stmt_iterator gsi;
5197 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5198 {
5199 struct ipa_agg_replacement_value *v;
5200 gimple stmt = gsi_stmt (gsi);
5201 tree rhs, val, t;
5202 HOST_WIDE_INT offset, size;
5203 int index;
5204 bool by_ref, vce;
5205
5206 if (!gimple_assign_load_p (stmt))
5207 continue;
5208 rhs = gimple_assign_rhs1 (stmt);
5209 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5210 continue;
5211
5212 vce = false;
5213 t = rhs;
5214 while (handled_component_p (t))
5215 {
5216 /* V_C_E can do things like convert an array of integers to one
5217 bigger integer or similar things that we do not handle below. */
5218 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5219 {
5220 vce = true;
5221 break;
5222 }
5223 t = TREE_OPERAND (t, 0);
5224 }
5225 if (vce)
5226 continue;
5227
5228 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5229 &offset, &size, &by_ref))
5230 continue;
5231 for (v = m_aggval; v; v = v->next)
5232 if (v->index == index
5233 && v->offset == offset)
5234 break;
5235 if (!v
5236 || v->by_ref != by_ref
5237 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5238 continue;
5239
5240 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5241 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5242 {
5243 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5244 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5245 else if (TYPE_SIZE (TREE_TYPE (rhs))
5246 == TYPE_SIZE (TREE_TYPE (v->value)))
5247 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5248 else
5249 {
5250 if (dump_file)
5251 {
5252 fprintf (dump_file, " const ");
5253 print_generic_expr (dump_file, v->value, 0);
5254 fprintf (dump_file, " can't be converted to type of ");
5255 print_generic_expr (dump_file, rhs, 0);
5256 fprintf (dump_file, "\n");
5257 }
5258 continue;
5259 }
5260 }
5261 else
5262 val = v->value;
5263
5264 if (dump_file && (dump_flags & TDF_DETAILS))
5265 {
5266 fprintf (dump_file, "Modifying stmt:\n ");
5267 print_gimple_stmt (dump_file, stmt, 0, 0);
5268 }
5269 gimple_assign_set_rhs_from_tree (&gsi, val);
5270 update_stmt (stmt);
5271
5272 if (dump_file && (dump_flags & TDF_DETAILS))
5273 {
5274 fprintf (dump_file, "into:\n ");
5275 print_gimple_stmt (dump_file, stmt, 0, 0);
5276 fprintf (dump_file, "\n");
5277 }
5278
5279 *m_something_changed = true;
5280 if (maybe_clean_eh_stmt (stmt)
5281 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5282 *m_cfg_changed = true;
5283 }
5284
5285 }
5286
5287 /* Update alignment of formal parameters as described in
5288 ipcp_transformation_summary. */
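/* For instance, if the summary records a known alignment of 16 bytes with
   misalignment 4 for a pointer parameter whose default-definition SSA name
   has no better alignment info yet, the loop below records those values on
   the name's ptr_info via set_ptr_info_alignment. */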
5289
5290 static void
5291 ipcp_update_alignments (struct cgraph_node *node)
5292 {
5293 tree fndecl = node->decl;
5294 tree parm = DECL_ARGUMENTS (fndecl);
5295 tree next_parm = parm;
5296 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5297 if (!ts || vec_safe_length (ts->alignments) == 0)
5298 return;
5299 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5300 unsigned count = alignments.length ();
5301
5302 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5303 {
5304 if (node->clone.combined_args_to_skip
5305 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5306 continue;
5307 gcc_checking_assert (parm);
5308 next_parm = DECL_CHAIN (parm);
5309
5310 if (!alignments[i].known || !is_gimple_reg (parm))
5311 continue;
5312 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5313 if (!ddef)
5314 continue;
5315
5316 if (dump_file)
5317 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5318 "misalignment to %u\n", i, alignments[i].align,
5319 alignments[i].misalign);
5320
5321 struct ptr_info_def *pi = get_ptr_info (ddef);
5322 gcc_checking_assert (pi);
5323 unsigned old_align;
5324 unsigned old_misalign;
5325 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5326
5327 if (old_known
5328 && old_align >= alignments[i].align)
5329 {
5330 if (dump_file)
5331 fprintf (dump_file, " But the alignment was already %u.\n",
5332 old_align);
5333 continue;
5334 }
5335 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5336 }
5337 }
5338
5339 /* IPCP transformation phase doing propagation of aggregate values. */
5340
5341 unsigned int
5342 ipcp_transform_function (struct cgraph_node *node)
5343 {
5344 vec<ipa_param_descriptor> descriptors = vNULL;
5345 struct func_body_info fbi;
5346 struct ipa_agg_replacement_value *aggval;
5347 int param_count;
5348 bool cfg_changed = false, something_changed = false;
5349
5350 gcc_checking_assert (cfun);
5351 gcc_checking_assert (current_function_decl);
5352
5353 if (dump_file)
5354 fprintf (dump_file, "Modification phase of node %s/%i\n",
5355 node->name (), node->order);
5356
5357 ipcp_update_alignments (node);
5358 aggval = ipa_get_agg_replacements_for_node (node);
5359 if (!aggval)
5360 return 0;
5361 param_count = count_formal_params (node->decl);
5362 if (param_count == 0)
5363 return 0;
5364 adjust_agg_replacement_values (node, aggval);
5365 if (dump_file)
5366 ipa_dump_agg_replacement_values (dump_file, aggval);
5367
5368 fbi.node = node;
5369 fbi.info = NULL;
5370 fbi.bb_infos = vNULL;
5371 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5372 fbi.param_count = param_count;
5373 fbi.aa_walked = 0;
5374
5375 descriptors.safe_grow_cleared (param_count);
5376 ipa_populate_param_decls (node, descriptors);
5377 calculate_dominance_info (CDI_DOMINATORS);
5378 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5379 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5380
5381 int i;
5382 struct ipa_bb_info *bi;
5383 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5384 free_ipa_bb_info (bi);
5385 fbi.bb_infos.release ();
5386 free_dominance_info (CDI_DOMINATORS);
5387 (*ipcp_transformations)[node->uid].agg_values = NULL;
5388 (*ipcp_transformations)[node->uid].alignments = NULL;
5389 descriptors.release ();
5390
5391 if (!something_changed)
5392 return 0;
5393 else if (cfg_changed)
5394 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5395 else
5396 return TODO_update_ssa_only_virtuals;
5397 }